Oct 06 13:38:30 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 06 13:38:30 crc restorecon[4735]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 06 13:38:30 crc restorecon[4735]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc 
restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 06 13:38:30 crc 
restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc 
restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc 
restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 
crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 
13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 06 13:38:30 crc restorecon[4735]: 
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:30 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 
13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc 
restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 06 13:38:31 crc restorecon[4735]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Oct 06 13:38:31 crc kubenswrapper[4757]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 06 13:38:31 crc kubenswrapper[4757]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Oct 06 13:38:31 crc kubenswrapper[4757]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 06 13:38:31 crc kubenswrapper[4757]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 06 13:38:31 crc kubenswrapper[4757]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Oct 06 13:38:31 crc kubenswrapper[4757]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.939259    4757 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947590    4757 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947631    4757 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947642    4757 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947651    4757 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947660    4757 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947668    4757 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947677    4757 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947685    4757 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947694    4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947701    4757 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947709    4757 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947717    4757 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947725    4757 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947732    4757 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947740    4757 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947748    4757 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947756    4757 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947764    4757 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947772    4757 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947780    4757 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947788    4757 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947796    4757 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947804    4757 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947811    4757 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947819    4757 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947827    4757 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947837    4757 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947848    4757 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947856    4757 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947864    4757 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947872    4757 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947890    4757 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947898    4757 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947906    4757 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947913    4757 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947921    4757 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947928    4757 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947936    4757 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947944    4757 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947952    4757 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947959    4757 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947967    4757 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947975    4757 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947982    4757 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947990    4757 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.947997    4757 feature_gate.go:330] unrecognized feature gate:
ManagedBootImagesAWS Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948005 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948013 4757 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948020 4757 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948029 4757 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948037 4757 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948045 4757 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948055 4757 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948066 4757 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948075 4757 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948083 4757 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948126 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948136 4757 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948145 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948153 4757 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948166 4757 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
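[Annotation, not part of the captured log] The two deprecation notices at the top of this excerpt say that --pod-infra-container-image and --system-reserved belong in the file named by --config (here /etc/kubernetes/kubelet.conf, per the FLAG dump further down). As a minimal sketch only, assuming PyYAML is installed, the following Python renders the --system-reserved value seen later in this log as the equivalent KubeletConfiguration stanza:

    import yaml  # assumption: PyYAML is available in the environment

    # Flag value copied from the "FLAG: --system-reserved=..." line later in this log.
    flag_value = "cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
    system_reserved = dict(kv.split("=", 1) for kv in flag_value.split(","))

    # KubeletConfiguration is the config-file schema the deprecation warning points at.
    config = {
        "apiVersion": "kubelet.config.k8s.io/v1beta1",
        "kind": "KubeletConfiguration",
        "systemReserved": system_reserved,
    }
    print(yaml.safe_dump(config, sort_keys=False))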
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948175 4757 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948183 4757 feature_gate.go:330] unrecognized feature gate: Example
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948192 4757 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948201 4757 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948210 4757 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948219 4757 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948227 4757 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948235 4757 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948243 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.948251 4757 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949358 4757 flags.go:64] FLAG: --address="0.0.0.0"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949384 4757 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949402 4757 flags.go:64] FLAG: --anonymous-auth="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949414 4757 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949425 4757 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949435 4757 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949447 4757 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949457 4757 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949468 4757 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949477 4757 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949487 4757 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949497 4757 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949506 4757 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949516 4757 flags.go:64] FLAG: --cgroup-root=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949525 4757 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949534 4757 flags.go:64] FLAG: --client-ca-file=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949543 4757 flags.go:64] FLAG: --cloud-config=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949552 4757 flags.go:64] FLAG: --cloud-provider=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949561 4757 flags.go:64] FLAG: --cluster-dns="[]"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949572 4757 flags.go:64] FLAG: --cluster-domain=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949582 4757 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949591 4757 flags.go:64] FLAG: --config-dir=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949600 4757 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949609 4757 flags.go:64] FLAG: --container-log-max-files="5"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949620 4757 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949630 4757 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949640 4757 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949649 4757 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949658 4757 flags.go:64] FLAG: --contention-profiling="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949667 4757 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949676 4757 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949685 4757 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949695 4757 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949705 4757 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949714 4757 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949723 4757 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949733 4757 flags.go:64] FLAG: --enable-load-reader="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949742 4757 flags.go:64] FLAG: --enable-server="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949751 4757 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949763 4757 flags.go:64] FLAG: --event-burst="100"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949772 4757 flags.go:64] FLAG: --event-qps="50"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949781 4757 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949790 4757 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949800 4757 flags.go:64] FLAG: --eviction-hard=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949822 4757 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949832 4757 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949840 4757 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949849 4757 flags.go:64] FLAG: --eviction-soft=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949858 4757 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949867 4757 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949876 4757 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949885 4757 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949893 4757 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949902 4757 flags.go:64] FLAG: --fail-swap-on="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949911 4757 flags.go:64] FLAG: --feature-gates=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949922 4757 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949931 4757 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949940 4757 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949949 4757 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949958 4757 flags.go:64] FLAG: --healthz-port="10248"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949967 4757 flags.go:64] FLAG: --help="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949976 4757 flags.go:64] FLAG: --hostname-override=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949985 4757 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.949994 4757 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950003 4757 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950012 4757 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950045 4757 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950054 4757 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950063 4757 flags.go:64] FLAG: --image-service-endpoint=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950072 4757 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950081 4757 flags.go:64] FLAG: --kube-api-burst="100"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950113 4757 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950123 4757 flags.go:64] FLAG: --kube-api-qps="50"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950132 4757 flags.go:64] FLAG: --kube-reserved=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950141 4757 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950149 4757 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950159 4757 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950168 4757 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950177 4757 flags.go:64] FLAG: --lock-file=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950186 4757 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950195 4757 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950205 4757 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950218 4757 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950227 4757 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950236 4757 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950244 4757 flags.go:64] FLAG: --logging-format="text"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950254 4757 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950263 4757 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950272 4757 flags.go:64] FLAG: --manifest-url=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950280 4757 flags.go:64] FLAG: --manifest-url-header=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950291 4757 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950300 4757 flags.go:64] FLAG: --max-open-files="1000000"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950311 4757 flags.go:64] FLAG: --max-pods="110"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950320 4757 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950329 4757 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950338 4757 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950346 4757 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950356 4757 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950365 4757 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950374 4757 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950393 4757 flags.go:64] FLAG: --node-status-max-images="50"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950403 4757 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950412 4757 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950421 4757 flags.go:64] FLAG: --pod-cidr=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950430 4757 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950442 4757 flags.go:64] FLAG: --pod-manifest-path=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950451 4757 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950461 4757 flags.go:64] FLAG: --pods-per-core="0"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950469 4757 flags.go:64] FLAG: --port="10250"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950479 4757 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950488 4757 flags.go:64] FLAG: --provider-id=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950497 4757 flags.go:64] FLAG: --qos-reserved=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950506 4757 flags.go:64] FLAG: --read-only-port="10255"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950515 4757 flags.go:64] FLAG: --register-node="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950523 4757 flags.go:64] FLAG: --register-schedulable="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950533 4757 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950547 4757 flags.go:64] FLAG: --registry-burst="10"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950556 4757 flags.go:64] FLAG: --registry-qps="5"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950565 4757 flags.go:64] FLAG: --reserved-cpus=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950574 4757 flags.go:64] FLAG: --reserved-memory=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950584 4757 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950593 4757 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950603 4757 flags.go:64] FLAG: --rotate-certificates="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950611 4757 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950620 4757 flags.go:64] FLAG: --runonce="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950629 4757 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950638 4757 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950648 4757 flags.go:64] FLAG: --seccomp-default="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950662 4757 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950671 4757 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950680 4757 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950689 4757 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950698 4757 flags.go:64] FLAG: --storage-driver-password="root"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950707 4757 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950716 4757 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950725 4757 flags.go:64] FLAG: --storage-driver-user="root"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950735 4757 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950744 4757 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950754 4757 flags.go:64] FLAG: --system-cgroups=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950762 4757 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950776 4757 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950785 4757 flags.go:64] FLAG: --tls-cert-file=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950794 4757 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950805 4757 flags.go:64] FLAG: --tls-min-version=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950813 4757 flags.go:64] FLAG: --tls-private-key-file=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950822 4757 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950831 4757 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950839 4757 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950848 4757 flags.go:64] FLAG: --v="2"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950859 4757 flags.go:64] FLAG: --version="false"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950870 4757 flags.go:64] FLAG: --vmodule=""
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950882 4757 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950891 4757 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951207 4757 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951221 4757 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951231 4757 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951239 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951248 4757 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951259 4757 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951270 4757 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
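[Annotation, not part of the captured log] The flags.go:64 block above is the kubelet echoing every command-line flag at its configured verbosity (--v="2"). A small illustrative sketch of scraping those lines back into name/value pairs, using one line copied from this dump:

    import re

    # Matches the 'flags.go:NN] FLAG: --name="value"' tail of a journal line.
    FLAG_RE = re.compile(r'flags\.go:\d+\] FLAG: (--[\w.-]+)="(.*)"\s*$')

    def parse_flag(line):
        m = FLAG_RE.search(line)
        return (m.group(1), m.group(2)) if m else None

    sample = ('Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.950762 4757 '
              'flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"')
    print(parse_flag(sample))
    # ('--system-reserved', 'cpu=200m,ephemeral-storage=350Mi,memory=350Mi')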
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951282 4757 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951291 4757 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951299 4757 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951307 4757 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951316 4757 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951323 4757 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951331 4757 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951338 4757 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951346 4757 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951354 4757 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951361 4757 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951369 4757 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951376 4757 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951384 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951392 4757 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951400 4757 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951408 4757 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951415 4757 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951423 4757 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951431 4757 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951439 4757 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951447 4757 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951454 4757 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951462 4757 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951470 4757 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951478 4757 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951486 4757 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951494 4757 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951502 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951510 4757 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951518 4757 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951526 4757 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951534 4757 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951541 4757 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951551 4757 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951561 4757 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951570 4757 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951578 4757 feature_gate.go:330] unrecognized feature gate: Example
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951587 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951595 4757 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951603 4757 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951612 4757 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951619 4757 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951628 4757 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951636 4757 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951645 4757 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951654 4757 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951662 4757 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951669 4757 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951677 4757 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951685 4757 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951694 4757 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951702 4757 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951710 4757 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951717 4757 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951725 4757 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951733 4757 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951741 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951751 4757 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951760 4757 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951769 4757 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951778 4757 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951786 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.951795 4757 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.951819 4757 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.962860 4757 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.962876 4757 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962930 4757 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962936 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962941 4757 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962945 4757 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962948 4757 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962952 4757 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962955 4757 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962959 4757 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962963 4757 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962967 4757 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962972 4757 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962977 4757 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962981 4757 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962985 4757 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962989 4757 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962993 4757 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.962997 4757 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963000 4757 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963004 4757 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963007 4757 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963011 4757 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963014 4757 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963018 4757 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963021 4757 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963025 4757 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963029 4757 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963032 4757 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963036 4757 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963040 4757 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963043 4757 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963047 4757 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963051 4757 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963055 4757 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963058 4757 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963064 4757 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963069 4757 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963073 4757 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963076 4757 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963080 4757 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963084 4757 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963087 4757 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963103 4757 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963107 4757 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963110 4757 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963114 4757 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963118 4757 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963123 4757 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963126 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963130 4757 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963133 4757 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963137 4757 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963140 4757 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963144 4757 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963147 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963152 4757 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963156 4757 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963160 4757 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963164 4757 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963167 4757 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963172 4757 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963176 4757 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963180 4757 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963184 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963187 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963191 4757 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963195 4757 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963198 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963202 4757 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963205 4757 feature_gate.go:330] unrecognized feature gate: Example
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963209 4757 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963213 4757 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.963219 4757 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963318 4757 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963324 4757 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963328 4757 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963332 4757 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963336 4757 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963339 4757 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963343 4757 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963346 4757 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963350 4757 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963354 4757 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963357 4757 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963361 4757 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963364 4757 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963368 4757 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963371 4757 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963375 4757 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963378 4757 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963381 4757 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963385 4757 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963388 4757 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963391 4757 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963395 4757 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963398 4757 feature_gate.go:330] unrecognized feature gate: Example
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963403 4757 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963407 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963411 4757 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963415 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963418 4757 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963422 4757 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963425 4757 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963429 4757 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963432 4757 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963435 4757 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963439 4757 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963444 4757 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963449 4757 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963453 4757 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963456 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963460 4757 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963463 4757 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963467 4757 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963470 4757 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963473 4757 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963477 4757 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963481 4757 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963486 4757 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963490 4757 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963493 4757 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963497 4757 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963500 4757 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963504 4757 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963507 4757 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963510 4757 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963514 4757 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963517 4757 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963521 4757 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963524 4757 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963527 4757 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963531 4757 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963535 4757 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963538 4757 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963541 4757 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963545 4757 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963548 4757 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963553 4757 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963557 4757 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963560 4757 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963564 4757 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963567 4757 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963571 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 06 13:38:31 crc kubenswrapper[4757]: W1006 13:38:31.963576 4757 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.963582 4757 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.964296 4757 server.go:940] "Client rotation is on, will bootstrap in background"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.969648 4757 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.969765 4757 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
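[Annotation, not part of the captured log] Each pass over the feature-gate flags ends with a feature_gate.go:386 line that prints the effective gates as a Go map. A sketch, illustration only and with the map abbreviated, of turning that rendering into a Python dict of gate name to bool:

    import re

    # Abbreviated copy of a "feature gates: {map[...]}" line from this log.
    line = ('feature gates: {map[CloudDualStackNodeIPs:true '
            'DisableKubeletCloudCredentialProviders:true KMSv1:true '
            'ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}')
    body = re.search(r'\{map\[(.*)\]\}', line).group(1)
    # Go renders map entries as space-separated "Name:bool" pairs.
    gates = {name: val == "true"
             for name, val in (kv.rsplit(":", 1) for kv in body.split())}
    print(gates["KMSv1"], gates["VolumeAttributesClass"])  # True False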
Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.971607 4757 server.go:997] "Starting client certificate rotation" Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.971637 4757 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.972770 4757 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-07 20:58:32.902558243 +0000 UTC Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.972997 4757 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2239h20m0.929569214s for next certificate rotation Oct 06 13:38:31 crc kubenswrapper[4757]: I1006 13:38:31.997953 4757 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.001111 4757 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.019955 4757 log.go:25] "Validated CRI v1 runtime API" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.054321 4757 log.go:25] "Validated CRI v1 image API" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.056084 4757 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.062609 4757 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-06-13-34-00-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.062656 4757 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:45 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}] Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.086392 4757 manager.go:217] Machine: {Timestamp:2025-10-06 13:38:32.08194951 +0000 UTC m=+0.579268087 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:77015af3-b2cf-40c4-8ed8-504c8efcff1f BootID:e1ec82ec-57e6-47de-8235-c2486415aecd Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:45 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 
Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:0c:d0:f5 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:0c:d0:f5 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:f1:9b:c9 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:eb:03:ff Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:ce:3a:70 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:7f:1c:7e Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:27:b2:a6 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:2a:a0:d2:69:1e:f8 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ca:a2:3a:58:02:97 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.086639 4757 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.086763 4757 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.087023 4757 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.087276 4757 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.087311 4757 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.087522 4757 topology_manager.go:138] "Creating topology manager with none policy" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.087533 4757 container_manager_linux.go:303] "Creating device plugin manager" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.088158 4757 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.088195 4757 server.go:66] "Creating device plugin registration server" 
version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.089277 4757 state_mem.go:36] "Initialized new in-memory state store" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.089376 4757 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.093414 4757 kubelet.go:418] "Attempting to sync node with API server" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.093438 4757 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.093483 4757 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.093497 4757 kubelet.go:324] "Adding apiserver pod source" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.093509 4757 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.096810 4757 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.098859 4757 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Oct 06 13:38:32 crc kubenswrapper[4757]: W1006 13:38:32.100078 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused Oct 06 13:38:32 crc kubenswrapper[4757]: W1006 13:38:32.100084 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.100344 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError" Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.100352 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.101335 4757 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102795 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102823 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102832 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102840 4757 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102854 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102862 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102871 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102884 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102893 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102903 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102921 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.102929 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.103565 4757 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.103941 4757 server.go:1280] "Started kubelet" Oct 06 13:38:32 crc systemd[1]: Started Kubernetes Kubelet. Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.106866 4757 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.107527 4757 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.113663 4757 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.114234 4757 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.114670 4757 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.114926 4757 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.116329 4757 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.116376 4757 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.117009 4757 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.117358 4757 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 02:10:31.675686211 +0000 UTC Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.117548 4757 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1236h31m59.55814837s for next certificate rotation Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.118378 4757 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.199:6443: connect: connection refused" interval="200ms" Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.117282 4757 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.117626 4757 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.199:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186bea7f59a31518 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-06 13:38:32.103916824 +0000 UTC m=+0.601235371,LastTimestamp:2025-10-06 13:38:32.103916824 +0000 UTC m=+0.601235371,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 06 13:38:32 crc kubenswrapper[4757]: W1006 13:38:32.121557 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.121696 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.122024 4757 factory.go:55] Registering systemd factory Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.122056 4757 factory.go:221] Registration of the systemd container factory successfully Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.123043 4757 factory.go:153] Registering CRI-O factory Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.123083 4757 factory.go:221] Registration of the crio container factory successfully Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.123131 4757 server.go:460] "Adding debug handlers to kubelet server" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.123233 4757 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.123277 4757 factory.go:103] Registering Raw factory Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.123336 4757 manager.go:1196] Started watching for new ooms in manager Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.124535 4757 manager.go:319] Starting recovery of all containers Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146425 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 06 
13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146549 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146577 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146600 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146628 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146646 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146682 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146701 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146732 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146934 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146951 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.146985 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147007 4757 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147038 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147054 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147079 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147128 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147160 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147184 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147202 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147227 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147244 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147269 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147286 4757 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147303 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147324 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147352 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147372 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147403 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147427 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147451 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147479 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147496 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147518 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147534 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147550 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147573 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147598 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147624 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147645 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147661 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147686 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147702 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147724 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147757 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147777 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147800 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147817 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147841 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147859 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147876 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.147898 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148196 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148246 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148274 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148459 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148500 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148527 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148550 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148563 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148576 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148593 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148608 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148630 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148647 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148668 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148693 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148712 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148745 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148764 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148782 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148805 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148822 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148842 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148861 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148884 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148916 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148940 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148963 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.148988 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149005 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149036 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149055 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149068 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149086 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149127 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149149 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149173 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149193 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149256 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149277 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149312 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149330 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149353 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149380 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149398 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149430 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149453 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149470 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149490 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149506 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149526 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149543 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149562 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149607 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149664 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149706 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149731 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149764 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149795 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149815 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149840 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149865 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.149885 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.150851 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.150945 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152051 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152086 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152120 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152138 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152151 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152164 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152233 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152250 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152262 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152274 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152289 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152305 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152321 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152336 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.152404 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153135 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153181 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153196 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153209 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153222 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153233 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153247 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153260 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153272 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153285 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153298 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153311 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153322 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153334 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153345 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153356 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153369 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153384 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153401 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153427 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153450 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153465 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153481 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153498 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153513 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153536 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153554 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153569 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153585 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153601 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153613 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153625 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153637 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153654 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153669 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153685 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153701 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153717 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153734 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153751 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153767 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153782 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153798 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153816 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153830 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153850 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153868 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153885 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153902 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153918 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153942 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153959 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153974 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.153990 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154008 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154023 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154039 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154060 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154076 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154154 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154174 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154192 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154214 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154231 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154248 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154265 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154281 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154298 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154314 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154330 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.154355 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.158249 4757 manager.go:324] Recovery completed Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.159173 4757 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.159214 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.159248 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.159288 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.159301 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.159314 4757 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.159326 4757 reconstruct.go:97] "Volume reconstruction finished" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.159336 4757 reconciler.go:26] "Reconciler: start to sync state" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.174474 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.174797 4757 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv4" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.176249 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.176290 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.176303 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.176996 4757 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.177010 4757 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.177029 4757 state_mem.go:36] "Initialized new in-memory state store" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.178205 4757 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.178383 4757 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.178498 4757 kubelet.go:2335] "Starting kubelet main sync loop" Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.178867 4757 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 06 13:38:32 crc kubenswrapper[4757]: W1006 13:38:32.181014 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.181180 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.193542 4757 policy_none.go:49] "None policy: Start" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.194340 4757 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.194417 4757 state_mem.go:35] "Initializing new in-memory state store" Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.219673 4757 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.262328 4757 manager.go:334] "Starting Device Plugin manager" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.262531 4757 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.262549 4757 server.go:79] "Starting device plugin registration server" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.262871 4757 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.262891 4757 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 06 
13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.263125 4757 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.263207 4757 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.263214 4757 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.275362 4757 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.279758 4757 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.279883 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.281535 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.281565 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.281574 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.281709 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.281923 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.281973 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.282648 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.282672 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.282679 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.282739 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.282869 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.282912 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283034 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283071 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283088 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283514 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283542 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283551 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283626 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283730 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283751 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283766 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283792 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.283809 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.284294 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.284326 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.284340 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.284545 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.284662 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.284698 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.284706 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.284746 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.284715 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.285519 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.285561 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.285579 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.285717 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.285731 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.285738 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.285812 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.285851 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.286899 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.286938 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.286957 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.320133 4757 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.199:6443: connect: connection refused" interval="400ms" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.361653 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.361864 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.362024 4757 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.362193 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.362418 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.362567 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.362708 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.362840 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.362988 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.363180 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.363047 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.363409 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.363559 4757 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.363696 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.363855 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.363988 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.364496 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.364523 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.364534 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.364557 4757 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.364993 4757 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.199:6443: connect: connection refused" node="crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466063 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466555 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466593 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466626 4757 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466657 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466686 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466715 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466744 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466774 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466803 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466832 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466859 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466888 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") 
" pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466918 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466947 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.466332 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.467456 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.467503 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.467537 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.467621 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.467667 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.467714 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.467942 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.467985 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.468038 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.468080 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.468153 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.468195 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.468239 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.468282 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.565576 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.567258 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.567421 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.567539 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.567667 4757 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.568192 4757 kubelet_node_status.go:99] "Unable to 
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.628394 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.638153 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.668302 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Oct 06 13:38:32 crc kubenswrapper[4757]: W1006 13:38:32.697893 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-c7ec90b3a1dfdcfc2abb7c2a46c61e7142c73933a74542fa66520bcbc3ba3c1b WatchSource:0}: Error finding container c7ec90b3a1dfdcfc2abb7c2a46c61e7142c73933a74542fa66520bcbc3ba3c1b: Status 404 returned error can't find the container with id c7ec90b3a1dfdcfc2abb7c2a46c61e7142c73933a74542fa66520bcbc3ba3c1b
Oct 06 13:38:32 crc kubenswrapper[4757]: W1006 13:38:32.700711 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-8d2f52e743d9bea85e1f93fb9e5a67bc9afda0a20f14a5502310fce5c9ea0de2 WatchSource:0}: Error finding container 8d2f52e743d9bea85e1f93fb9e5a67bc9afda0a20f14a5502310fce5c9ea0de2: Status 404 returned error can't find the container with id 8d2f52e743d9bea85e1f93fb9e5a67bc9afda0a20f14a5502310fce5c9ea0de2
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.701024 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.706488 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 06 13:38:32 crc kubenswrapper[4757]: W1006 13:38:32.718235 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-60cfdc76fa2ae446814a8b100088fe374d62a881cb6b2c46a88f0ec055a0a8cc WatchSource:0}: Error finding container 60cfdc76fa2ae446814a8b100088fe374d62a881cb6b2c46a88f0ec055a0a8cc: Status 404 returned error can't find the container with id 60cfdc76fa2ae446814a8b100088fe374d62a881cb6b2c46a88f0ec055a0a8cc
Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.720822 4757 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.199:6443: connect: connection refused" interval="800ms"
Oct 06 13:38:32 crc kubenswrapper[4757]: W1006 13:38:32.728657 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-1d208fdaffcb42c2784c88dcf618e4b45b6501cb4b3e65644f1b982661331664 WatchSource:0}: Error finding container 1d208fdaffcb42c2784c88dcf618e4b45b6501cb4b3e65644f1b982661331664: Status 404 returned error can't find the container with id 1d208fdaffcb42c2784c88dcf618e4b45b6501cb4b3e65644f1b982661331664
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.969114 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.970837 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.970941 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.970956 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:32 crc kubenswrapper[4757]: I1006 13:38:32.970985 4757 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 06 13:38:32 crc kubenswrapper[4757]: E1006 13:38:32.971752 4757 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.199:6443: connect: connection refused" node="crc"
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.116302 4757 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:33 crc kubenswrapper[4757]: W1006 13:38:33.135184 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:33 crc kubenswrapper[4757]: E1006 13:38:33.135305 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError"
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.184725 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1d208fdaffcb42c2784c88dcf618e4b45b6501cb4b3e65644f1b982661331664"}
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.186017 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"60cfdc76fa2ae446814a8b100088fe374d62a881cb6b2c46a88f0ec055a0a8cc"}
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.187481 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f28c61cad40d75cf7dbcad1788a8da208df056b3f58852318e6a23826aceb4ac"}
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.189355 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8d2f52e743d9bea85e1f93fb9e5a67bc9afda0a20f14a5502310fce5c9ea0de2"}
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.192698 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c7ec90b3a1dfdcfc2abb7c2a46c61e7142c73933a74542fa66520bcbc3ba3c1b"}
Oct 06 13:38:33 crc kubenswrapper[4757]: W1006 13:38:33.196428 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:33 crc kubenswrapper[4757]: E1006 13:38:33.196494 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError"
Oct 06 13:38:33 crc kubenswrapper[4757]: W1006 13:38:33.260200 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:33 crc kubenswrapper[4757]: E1006 13:38:33.260275 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError"
Oct 06 13:38:33 crc kubenswrapper[4757]: E1006 13:38:33.522139 4757 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.199:6443: connect: connection refused" interval="1.6s"
Oct 06 13:38:33 crc kubenswrapper[4757]: W1006 13:38:33.756216 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:33 crc kubenswrapper[4757]: E1006 13:38:33.756352 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError"
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.772756 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.774387 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.774422 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.774431 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:33 crc kubenswrapper[4757]: I1006 13:38:33.774453 4757 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 06 13:38:33 crc kubenswrapper[4757]: E1006 13:38:33.775123 4757 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.199:6443: connect: connection refused" node="crc"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.116081 4757 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.199316 4757 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="6dd21b8274429b18ba29738562ddc6e53ecf3a250c73464a11ae59862637138e" exitCode=0
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.199446 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.199576 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"6dd21b8274429b18ba29738562ddc6e53ecf3a250c73464a11ae59862637138e"}
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.200815 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.200887 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.200917 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.203387 4757 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39" exitCode=0
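Note how the lease controller's retry interval doubles after each failure: 800ms, then 1.6s above, and 3.2s and 6.4s further down. A sketch of that doubling pattern in Go, assuming a hypothetical ensureLease stand-in; the cap value is illustrative:

    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    // ensureLease is a stand-in for the real API call; it fails
    // while the API server is unreachable.
    func ensureLease() error { return errors.New("connection refused") }

    func main() {
        interval := 800 * time.Millisecond
        const maxInterval = 7 * time.Second // illustrative cap
        for i := 0; i < 4; i++ {
            if err := ensureLease(); err != nil {
                fmt.Printf("failed to ensure lease, will retry in %v: %v\n", interval, err)
                time.Sleep(interval)
                interval *= 2 // back off by doubling, as in the intervals logged here
                if interval > maxInterval {
                    interval = maxInterval
                }
            }
        }
    }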
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.203527 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39"}
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.203566 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.206189 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.206221 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.206232 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.211365 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534"}
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.211421 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.211429 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d"}
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.211444 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108"}
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.211457 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae"}
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.212577 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.212611 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.212623 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.214136 4757 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1" exitCode=0
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.214200 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1"}
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.214336 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.215183 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.215234 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.215244 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.216261 4757 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3026d0b19b3daf4ab7fee7b94d94d36be86de9d616744facaaff85f163b549d0" exitCode=0
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.216296 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3026d0b19b3daf4ab7fee7b94d94d36be86de9d616744facaaff85f163b549d0"}
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.216363 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.216664 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.218744 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.218770 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.218781 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.219070 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.219252 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:34 crc kubenswrapper[4757]: I1006 13:38:34.219275 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:34 crc kubenswrapper[4757]: W1006 13:38:34.794133 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:34 crc kubenswrapper[4757]: E1006 13:38:34.794612 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError"
Oct 06 13:38:34 crc kubenswrapper[4757]: W1006 13:38:34.845048 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:34 crc kubenswrapper[4757]: E1006 13:38:34.845155 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.116334 4757 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:35 crc kubenswrapper[4757]: E1006 13:38:35.122953 4757 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.199:6443: connect: connection refused" interval="3.2s"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.218159 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.219931 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"cd48981887f9fefbe0d6dc191c7ef76a913ac4fbb48b3f167122987486f56bc4"}
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.220074 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.221735 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.221802 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.221820 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.222795 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7"}
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.222836 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada"}
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.222850 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.222855 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90"}
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.223996 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.224029 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.224044 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.226248 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6"}
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.226314 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227"}
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.226335 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3"}
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.228348 4757 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="dddb841ba0b288dbf0960daf6808b76c68e5e1469097441786908ede77b19ae4" exitCode=0
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.228445 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"dddb841ba0b288dbf0960daf6808b76c68e5e1469097441786908ede77b19ae4"}
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.228493 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.228523 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.229811 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.229841 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.229854 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.231082 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.231127 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.231140 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.375933 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.377304 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.377363 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.377380 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:35 crc kubenswrapper[4757]: I1006 13:38:35.377416 4757 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 06 13:38:35 crc kubenswrapper[4757]: E1006 13:38:35.378075 4757 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.199:6443: connect: connection refused" node="crc"
Oct 06 13:38:35 crc kubenswrapper[4757]: W1006 13:38:35.864719 4757 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.199:6443: connect: connection refused
Oct 06 13:38:35 crc kubenswrapper[4757]: E1006 13:38:35.864859 4757 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.199:6443: connect: connection refused" logger="UnhandledError"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.238249 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2"}
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.238309 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0"}
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.238360 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.247250 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.247298 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.247312 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.249860 4757 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="79096edd5f40a2cf75d72b15bd326a6003105304c0975f33f243287ce06232c1" exitCode=0
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.249991 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
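The PLEG entries above all follow one shape: a pod reference, then event={ID, Type, Data}, where ID is the pod UID and Data is the container (or sandbox) ID. A small Go sketch that extracts those fields from such a line; the regex is written against these lines only, not a general kubelet log parser:

    package main

    import (
        "fmt"
        "regexp"
    )

    // plegRe matches the pod and event fields of a "SyncLoop (PLEG)" log entry.
    var plegRe = regexp.MustCompile(`pod="([^"]+)" event={"ID":"([0-9a-f]+)","Type":"(\w+)","Data":"([0-9a-f]+)"}`)

    func main() {
        line := `"SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"dddb841ba0b288dbf0960daf6808b76c68e5e1469097441786908ede77b19ae4"}`
        if m := plegRe.FindStringSubmatch(line); m != nil {
            fmt.Printf("pod=%s uid=%s type=%s container=%s\n", m[1], m[2], m[3], m[4])
        }
    }

The ContainerDied events with exitCode=0 seen here are the static pods' init containers completing in sequence before the main containers start.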
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.250242 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.250499 4757 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.250528 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.250477 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"79096edd5f40a2cf75d72b15bd326a6003105304c0975f33f243287ce06232c1"}
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.251143 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.251168 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.251177 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.251139 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.251326 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.251354 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.252928 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.253125 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.253239 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.253251 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.254346 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.254385 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.254395 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:36 crc kubenswrapper[4757]: I1006 13:38:36.659073 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.091058 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.098889 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.256718 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"604a2f26dcd1675b9f48a8873f33209e65756560f4c511fbdb0abe3dc9d557e7"}
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.256766 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7ad78097bfc586c3d76a0ff963ccae5e32791b2c5980e3e6f6909c99bb876b88"}
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.256783 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c956a7a867b2d2aac679298d7bdb87215c1b6f35df8129e95f964b222a3f035b"}
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.256809 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"df86ea46b678a89f771d493eb4e3c7a7dac1740bc2a48eb1d73f3d5195b03165"}
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.256821 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b8a2aab4e9d71182fabc776a65e3a338c7acdef20b99bb12fa5f09c400734258"}
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.256851 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.256920 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.256856 4757 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.257244 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.257661 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.257686 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.257696 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.257674 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.257793 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.257801 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.257972 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.257993 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:37 crc kubenswrapper[4757]: I1006 13:38:37.258006 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.258711 4757 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.258780 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.258853 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.258907 4757 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.258973 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.260437 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.260491 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.260514 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.260522 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.260545 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.260558 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.260574 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.260609 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.260628 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.578783 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.583900 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.583945 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.583958 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:38 crc kubenswrapper[4757]: I1006 13:38:38.583985 4757 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.708358 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.708549 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.709667 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.709698 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.709710 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.911802 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.911957 4757 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.911990 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.913241 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.913293 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:39 crc kubenswrapper[4757]: I1006 13:38:39.913311 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:40 crc kubenswrapper[4757]: I1006 13:38:40.652740 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Oct 06 13:38:40 crc kubenswrapper[4757]: I1006 13:38:40.652936 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:40 crc kubenswrapper[4757]: I1006 13:38:40.654151 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:40 crc kubenswrapper[4757]: I1006 13:38:40.654188 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:40 crc kubenswrapper[4757]: I1006 13:38:40.654200 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.600226 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.600512 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.602212 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.602289 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.602311 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.902941 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.903213 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.904878 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.904943 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:41 crc kubenswrapper[4757]: I1006 13:38:41.904958 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:42 crc kubenswrapper[4757]: E1006 13:38:42.276294 4757 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Oct 06 13:38:42 crc kubenswrapper[4757]: I1006 13:38:42.279492 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 06 13:38:42 crc kubenswrapper[4757]: I1006 13:38:42.279727 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:42 crc kubenswrapper[4757]: I1006 13:38:42.280885 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:42 crc kubenswrapper[4757]: I1006 13:38:42.280928 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:42 crc kubenswrapper[4757]: I1006 13:38:42.280942 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:43 crc kubenswrapper[4757]: I1006 13:38:43.462491 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 06 13:38:43 crc kubenswrapper[4757]: I1006 13:38:43.462750 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:43 crc kubenswrapper[4757]: I1006 13:38:43.463912 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:43 crc kubenswrapper[4757]: I1006 13:38:43.463942 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:43 crc kubenswrapper[4757]: I1006 13:38:43.463954 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:43 crc kubenswrapper[4757]: I1006 13:38:43.467509 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Oct 06 13:38:44 crc kubenswrapper[4757]: I1006 13:38:44.273996 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:44 crc kubenswrapper[4757]: I1006 13:38:44.275300 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:44 crc kubenswrapper[4757]: I1006 13:38:44.275345 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:44 crc kubenswrapper[4757]: I1006 13:38:44.275361 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.116461 4757 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.402430 4757 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:52508->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.402509 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:52508->192.168.126.11:17697: read: connection reset by peer"
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.463402 4757 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.463486 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.614260 4757 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.614446 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.620998 4757 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.621089 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.667806 4757 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403}
Oct 06 13:38:46 crc kubenswrapper[4757]: I1006 13:38:46.667866 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Oct 06 13:38:47 crc kubenswrapper[4757]: I1006 13:38:47.283922 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 06 13:38:47 crc kubenswrapper[4757]: I1006 13:38:47.287012 4757 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2" exitCode=255
Oct 06 13:38:47 crc kubenswrapper[4757]: I1006 13:38:47.287132 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2"}
Oct 06 13:38:47 crc kubenswrapper[4757]: I1006 13:38:47.287388 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:47 crc kubenswrapper[4757]: I1006 13:38:47.288774 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:38:47 crc kubenswrapper[4757]: I1006 13:38:47.288855 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:38:47 crc kubenswrapper[4757]: I1006 13:38:47.288875 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:38:47 crc kubenswrapper[4757]: I1006 13:38:47.289669 4757 scope.go:117] "RemoveContainer" containerID="813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2"
Oct 06 13:38:48 crc kubenswrapper[4757]: I1006 13:38:48.292185 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Oct 06 13:38:48 crc kubenswrapper[4757]: I1006 13:38:48.294449 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5"}
Oct 06 13:38:48 crc kubenswrapper[4757]: I1006 13:38:48.294732 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Oct 06 13:38:48 crc kubenswrapper[4757]: I1006 13:38:48.296303 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
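The 403s above come from the API server rejecting the kubelet's unauthenticated probe of /livez until the public-info-viewer RBAC cluster roles are available; the kubelet counts any non-2xx/3xx status as a probe failure. A stripped-down Go version of such an HTTPS probe check (URL illustrative, taken loosely from the log; TLS verification disabled here only to make the sketch self-contained against a cert the client does not trust):

    package main

    import (
        "crypto/tls"
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        client := &http.Client{
            Timeout:   time.Second,
            Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: true}},
        }
        resp, err := client.Get("https://192.168.126.11:6443/livez") // illustrative endpoint
        if err != nil {
            fmt.Println("probe failed:", err)
            return
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            fmt.Println("probe succeeded:", resp.StatusCode)
        } else {
            fmt.Println("HTTP probe failed with statuscode:", resp.StatusCode)
        }
    }

The exitCode=255 and the RemoveContainer entry that follow show the liveness failure taking effect: the kube-apiserver-check-endpoints container is killed and restarted.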
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:48 crc kubenswrapper[4757]: I1006 13:38:48.296349 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:49 crc kubenswrapper[4757]: I1006 13:38:49.740683 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 06 13:38:49 crc kubenswrapper[4757]: I1006 13:38:49.740858 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:49 crc kubenswrapper[4757]: I1006 13:38:49.741922 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:49 crc kubenswrapper[4757]: I1006 13:38:49.741979 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:49 crc kubenswrapper[4757]: I1006 13:38:49.741989 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:49 crc kubenswrapper[4757]: I1006 13:38:49.775634 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 06 13:38:50 crc kubenswrapper[4757]: I1006 13:38:50.299820 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:50 crc kubenswrapper[4757]: I1006 13:38:50.301162 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:50 crc kubenswrapper[4757]: I1006 13:38:50.301225 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:50 crc kubenswrapper[4757]: I1006 13:38:50.301248 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:50 crc kubenswrapper[4757]: I1006 13:38:50.653105 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:50 crc kubenswrapper[4757]: I1006 13:38:50.653275 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:50 crc kubenswrapper[4757]: I1006 13:38:50.654251 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:50 crc kubenswrapper[4757]: I1006 13:38:50.654296 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:50 crc kubenswrapper[4757]: I1006 13:38:50.654308 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:51 crc kubenswrapper[4757]: E1006 13:38:51.603556 4757 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.608149 4757 trace.go:236] Trace[1074048434]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Oct-2025 13:38:36.806) (total time: 14801ms): Oct 06 13:38:51 crc kubenswrapper[4757]: Trace[1074048434]: ---"Objects listed" error: 14801ms (13:38:51.608) Oct 06 13:38:51 crc kubenswrapper[4757]: Trace[1074048434]: [14.801536643s] [14.801536643s] END Oct 06 13:38:51 crc 
kubenswrapper[4757]: I1006 13:38:51.608186 4757 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.608353 4757 trace.go:236] Trace[849441082]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Oct-2025 13:38:39.429) (total time: 12178ms): Oct 06 13:38:51 crc kubenswrapper[4757]: Trace[849441082]: ---"Objects listed" error: 12178ms (13:38:51.608) Oct 06 13:38:51 crc kubenswrapper[4757]: Trace[849441082]: [12.178416571s] [12.178416571s] END Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.608392 4757 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 06 13:38:51 crc kubenswrapper[4757]: E1006 13:38:51.608801 4757 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.608996 4757 trace.go:236] Trace[2099389530]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Oct-2025 13:38:41.084) (total time: 10524ms): Oct 06 13:38:51 crc kubenswrapper[4757]: Trace[2099389530]: ---"Objects listed" error: 10524ms (13:38:51.608) Oct 06 13:38:51 crc kubenswrapper[4757]: Trace[2099389530]: [10.524647635s] [10.524647635s] END Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.609017 4757 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.609177 4757 trace.go:236] Trace[1737592708]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (06-Oct-2025 13:38:39.869) (total time: 11739ms): Oct 06 13:38:51 crc kubenswrapper[4757]: Trace[1737592708]: ---"Objects listed" error: 11739ms (13:38:51.608) Oct 06 13:38:51 crc kubenswrapper[4757]: Trace[1737592708]: [11.739282535s] [11.739282535s] END Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.609219 4757 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.609949 4757 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.663892 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:51 crc kubenswrapper[4757]: I1006 13:38:51.667545 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.105701 4757 apiserver.go:52] "Watching apiserver" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.107751 4757 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.108439 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-9j5jn","openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Oct 06 13:38:52 
crc kubenswrapper[4757]: I1006 13:38:52.109016 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.109073 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.109158 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.109352 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.109624 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-9j5jn" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.109896 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.109941 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.109995 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.110021 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.110047 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.111795 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.112044 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.112060 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.112253 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.113545 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.113633 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.113681 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.114341 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.114962 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.114968 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.115873 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.116229 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.117948 4757 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.134214 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.143084 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.157484 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.169240 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.190431 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.211857 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.212851 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.212921 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.212956 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.212987 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213020 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213075 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213125 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213158 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213189 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213215 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213280 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213310 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213367 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213396 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213423 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213454 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213481 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213628 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 
13:38:52.213667 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213698 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213731 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213762 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213795 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213826 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213859 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213899 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213933 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.213964 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 06 13:38:52 crc 
kubenswrapper[4757]: I1006 13:38:52.213994 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214025 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214072 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214122 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214134 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214156 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214187 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214220 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214255 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214284 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214330 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214358 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214387 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214416 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214447 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214479 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214507 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214535 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214565 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214594 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" 
(UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214625 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214660 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214689 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214720 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214751 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214779 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214808 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214838 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214868 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214899 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: 
\"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214929 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214962 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.214995 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215037 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215069 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215121 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215153 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215182 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215213 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215243 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215275 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215307 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215340 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215352 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215370 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215401 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215435 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215464 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215495 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215525 4757 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215556 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215586 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215619 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215654 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215687 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215719 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215749 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215782 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215813 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215844 4757 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215876 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215909 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215939 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215969 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216003 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216031 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216062 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216117 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216152 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 06 13:38:52 crc 
kubenswrapper[4757]: I1006 13:38:52.216184 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216213 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216243 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216273 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216302 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216332 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216365 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216397 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216428 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216459 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " 
Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216496 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216528 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216563 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216599 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216656 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.218828 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.218898 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.218935 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219036 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219069 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod 
\"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219112 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219263 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219291 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219393 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219424 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219447 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219534 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215614 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.215844 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216081 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216359 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221715 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.216596 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219154 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219218 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219324 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.222075 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219521 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.222410 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219544 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219356 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.219736 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:38:52.719714013 +0000 UTC m=+21.217032560 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223170 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223235 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223286 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223328 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223369 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223403 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223443 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223482 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223516 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod 
\"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223556 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223600 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223633 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223672 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223709 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223754 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223797 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223859 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223898 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223942 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: 
\"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223992 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224034 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224077 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224141 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224186 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224225 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224264 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224303 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224337 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224377 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224424 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224466 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224713 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224771 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224808 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224852 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224896 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224938 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225016 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225159 
4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225208 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225246 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225287 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223709 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.223912 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219878 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219892 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220113 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220323 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220465 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220703 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220714 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220755 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220872 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220928 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220943 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220955 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.220992 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221143 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221268 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221291 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221366 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221522 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221575 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221590 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221593 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221868 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221872 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.221970 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.222019 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224022 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224145 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224347 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224354 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224643 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224667 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.219263 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224689 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.224912 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225155 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225283 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225653 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225712 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.225929 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.226374 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.226642 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.226767 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). 
InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.227027 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.227753 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.227991 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.228232 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.228252 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.228494 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.228770 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.228814 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.229000 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.229160 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.229450 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.229532 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.229626 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.229657 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.229879 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.230077 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.230163 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.230199 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.230204 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.230514 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.230640 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.230698 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.230933 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.222024 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.231330 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.237582 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.238750 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.241830 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.242191 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.243241 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.243882 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.244216 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.244434 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.244738 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.244947 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.245028 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.245132 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.245142 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.245230 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.245566 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.245793 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.245817 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.245927 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.246036 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.246153 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.246223 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.246250 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.246261 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.246282 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.246386 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.246500 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.247625 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). 
InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.247638 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.247971 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.248250 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.248402 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.248421 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.248668 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.249413 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.250332 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.250361 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.250693 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.250873 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.250972 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.250995 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.251126 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.251424 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.251543 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.252070 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.251956 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.252358 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.252498 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.252813 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.252896 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253261 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253290 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253303 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253339 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253363 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253425 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253443 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253463 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253487 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253509 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253531 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253551 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: 
\"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253553 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253573 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253596 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253619 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253639 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253653 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253662 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253682 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253698 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253714 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253730 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253746 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253761 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253775 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253790 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253806 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253821 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253838 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253852 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.253866 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.254054 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.254154 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.254176 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.254231 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.254490 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.254655 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.254695 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.255199 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.255380 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.255636 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.255709 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.255847 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.255878 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.255888 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.255911 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.255960 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256022 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256058 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256070 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cslg\" (UniqueName: \"kubernetes.io/projected/de18b9fe-e396-469e-a6f6-d87ce91f3270-kube-api-access-2cslg\") pod \"node-resolver-9j5jn\" (UID: \"de18b9fe-e396-469e-a6f6-d87ce91f3270\") " pod="openshift-dns/node-resolver-9j5jn" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256152 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256177 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256204 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256247 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256331 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256377 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256402 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256426 4757 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256450 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256473 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256467 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256497 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256521 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256564 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/de18b9fe-e396-469e-a6f6-d87ce91f3270-hosts-file\") pod \"node-resolver-9j5jn\" (UID: \"de18b9fe-e396-469e-a6f6-d87ce91f3270\") " pod="openshift-dns/node-resolver-9j5jn" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.256883 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.257077 4757 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.257150 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:52.757134476 +0000 UTC m=+21.254453013 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257266 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.257329 4757 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.257361 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:52.757354065 +0000 UTC m=+21.254672602 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257359 4757 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257383 4757 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257397 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257409 4757 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257419 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257429 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257441 4757 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257451 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257460 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257470 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257478 4757 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257488 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: 
\"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257497 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257507 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257516 4757 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257525 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257533 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257541 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257549 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257557 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257566 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257575 4757 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257583 4757 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257594 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257603 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on 
node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257612 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257622 4757 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257630 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257639 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257647 4757 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257656 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257665 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257674 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257683 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257691 4757 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257699 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257708 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257716 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" 
Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257724 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257733 4757 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257741 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257749 4757 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257757 4757 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257765 4757 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257775 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257783 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257791 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257800 4757 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257809 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257818 4757 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257827 4757 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257838 4757 reconciler_common.go:293] "Volume detached for 
volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257847 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257855 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257864 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257783 4757 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257873 4757 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257885 4757 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257895 4757 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257903 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257932 4757 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257940 4757 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257949 4757 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257957 4757 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257965 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" 
(UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257830 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.257974 4757 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258024 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258035 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258046 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258149 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258160 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258171 4757 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258179 4757 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258188 4757 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258224 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258234 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath 
\"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258244 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258254 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258263 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258273 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258282 4757 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258291 4757 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258301 4757 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258310 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258319 4757 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258330 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258339 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258349 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258358 4757 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258367 4757 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258376 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258385 4757 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258397 4757 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258113 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258798 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258405 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258837 4757 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258846 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258856 4757 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258864 4757 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258873 4757 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258882 4757 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258891 4757 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258900 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258910 4757 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258919 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258928 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258939 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath 
\"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258948 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258958 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258969 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.258979 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.259044 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.259933 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.259954 4757 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.259964 4757 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.259974 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.259983 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.259991 4757 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260000 4757 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc 
kubenswrapper[4757]: I1006 13:38:52.260007 4757 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260015 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260025 4757 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260033 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260041 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260050 4757 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260058 4757 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260069 4757 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260078 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260086 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260110 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260122 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260131 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 
crc kubenswrapper[4757]: I1006 13:38:52.260139 4757 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260147 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260157 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260166 4757 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260175 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260184 4757 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260193 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260201 4757 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260210 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260219 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260227 4757 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260237 4757 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260245 4757 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260254 4757 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260262 4757 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260271 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260279 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260289 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260298 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260309 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260317 4757 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260325 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.260334 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.263048 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.264606 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.268979 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.269225 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.269242 4757 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.269429 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:52.769408896 +0000 UTC m=+21.266727503 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.269652 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.269884 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.270195 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.270471 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.271930 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.273123 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.273414 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.273602 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.274385 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.274966 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.275345 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.275434 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.280233 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.280374 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.280443 4757 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.280543 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:52.780525059 +0000 UTC m=+21.277843596 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.280867 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.280998 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.281217 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.281361 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.286577 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.290678 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.290787 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.290876 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.291141 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.291539 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.293553 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.299354 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.302841 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.302948 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.303220 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.303320 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.303540 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.303620 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.303759 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.303676 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.303776 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.303375 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.304359 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.304380 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.304516 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.305185 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.307057 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.308275 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.308381 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.310195 4757 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.312890 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.312953 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.323240 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.323600 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.324133 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.324287 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.330566 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.331970 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.337280 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.346523 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.354717 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361635 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361678 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cslg\" (UniqueName: \"kubernetes.io/projected/de18b9fe-e396-469e-a6f6-d87ce91f3270-kube-api-access-2cslg\") pod \"node-resolver-9j5jn\" (UID: \"de18b9fe-e396-469e-a6f6-d87ce91f3270\") " pod="openshift-dns/node-resolver-9j5jn" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361742 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361764 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/de18b9fe-e396-469e-a6f6-d87ce91f3270-hosts-file\") pod \"node-resolver-9j5jn\" (UID: \"de18b9fe-e396-469e-a6f6-d87ce91f3270\") " pod="openshift-dns/node-resolver-9j5jn" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361835 4757 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361849 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361863 4757 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361877 4757 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361889 4757 
reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361893 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361900 4757 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361941 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/de18b9fe-e396-469e-a6f6-d87ce91f3270-hosts-file\") pod \"node-resolver-9j5jn\" (UID: \"de18b9fe-e396-469e-a6f6-d87ce91f3270\") " pod="openshift-dns/node-resolver-9j5jn" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361946 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361971 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361974 4757 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.361999 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362012 4757 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362024 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362037 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362049 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362060 4757 
reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362072 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362083 4757 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362119 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362131 4757 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362143 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362155 4757 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362167 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362179 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362190 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362202 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362215 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362229 4757 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" 
DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362241 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362252 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362263 4757 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362274 4757 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362301 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362286 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362467 4757 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362480 4757 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362493 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362505 4757 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362517 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362528 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362539 4757 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362550 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362562 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362572 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.362582 4757 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.370055 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.376633 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cslg\" (UniqueName: \"kubernetes.io/projected/de18b9fe-e396-469e-a6f6-d87ce91f3270-kube-api-access-2cslg\") pod \"node-resolver-9j5jn\" (UID: \"de18b9fe-e396-469e-a6f6-d87ce91f3270\") " pod="openshift-dns/node-resolver-9j5jn" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.378864 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.396734 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.406218 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.426469 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.432948 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.439961 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-9j5jn" Oct 06 13:38:52 crc kubenswrapper[4757]: W1006 13:38:52.441460 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-f45ab46176a2bc01839a07eb7f02b2b40fef7f9d88585db662e8041ccaca62bb WatchSource:0}: Error finding container f45ab46176a2bc01839a07eb7f02b2b40fef7f9d88585db662e8041ccaca62bb: Status 404 returned error can't find the container with id f45ab46176a2bc01839a07eb7f02b2b40fef7f9d88585db662e8041ccaca62bb Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.447748 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 06 13:38:52 crc kubenswrapper[4757]: W1006 13:38:52.463043 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde18b9fe_e396_469e_a6f6_d87ce91f3270.slice/crio-c859200db77ee0d216e9c93cd14a8d1bddf4fd62c0bdeb3ab849cb6773c4bb32 WatchSource:0}: Error finding container c859200db77ee0d216e9c93cd14a8d1bddf4fd62c0bdeb3ab849cb6773c4bb32: Status 404 returned error can't find the container with id c859200db77ee0d216e9c93cd14a8d1bddf4fd62c0bdeb3ab849cb6773c4bb32 Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.635075 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-7tb7h"] Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.635952 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.636380 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-rhrzr"] Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.637377 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-9qf7z"] Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.637570 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.637859 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.639627 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.639682 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.639897 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.639918 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.640239 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.640351 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.640571 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.640650 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.640724 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.640777 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.641849 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.641885 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.656053 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.667061 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.677743 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.687820 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.696649 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.708157 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.723847 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.733630 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.745836 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.758750 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767323 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767414 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-conf-dir\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767437 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-os-release\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767456 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-socket-dir-parent\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767471 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-run-k8s-cni-cncf-io\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.767560 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:38:53.767526322 +0000 UTC m=+22.264844879 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767670 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brz4x\" (UniqueName: \"kubernetes.io/projected/62d4cce6-0583-40a6-b7ea-2996d07b49b9-kube-api-access-brz4x\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767714 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-os-release\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767737 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-etc-kubernetes\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767768 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0010c888-d5ad-4b2b-8309-1647fdf0dee3-proxy-tls\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767806 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/62d4cce6-0583-40a6-b7ea-2996d07b49b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767861 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvf5t\" (UniqueName: \"kubernetes.io/projected/0010c888-d5ad-4b2b-8309-1647fdf0dee3-kube-api-access-kvf5t\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767888 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-cni-dir\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767910 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf4fb\" (UniqueName: 
\"kubernetes.io/projected/9144d9fd-70d7-4a29-8e6b-c020c611980a-kube-api-access-mf4fb\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767927 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-system-cni-dir\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.767974 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-cnibin\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768023 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0010c888-d5ad-4b2b-8309-1647fdf0dee3-mcd-auth-proxy-config\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768061 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768090 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-cnibin\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768138 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-daemon-config\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768159 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0010c888-d5ad-4b2b-8309-1647fdf0dee3-rootfs\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768178 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9144d9fd-70d7-4a29-8e6b-c020c611980a-cni-binary-copy\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 
13:38:52.768198 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-var-lib-cni-multus\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.768203 4757 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768224 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768247 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-run-netns\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.768286 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:53.768251651 +0000 UTC m=+22.265570188 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.768303 4757 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768317 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.768352 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:53.768336494 +0000 UTC m=+22.265655031 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768367 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-system-cni-dir\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768396 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-var-lib-kubelet\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768429 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-hostroot\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768455 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/62d4cce6-0583-40a6-b7ea-2996d07b49b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768476 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-var-lib-cni-bin\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.768500 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-run-multus-certs\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.773945 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.785854 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.798687 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.805576 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.813477 4757 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.824722 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.834997 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.846863 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.854138 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.861691 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869052 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brz4x\" (UniqueName: \"kubernetes.io/projected/62d4cce6-0583-40a6-b7ea-2996d07b49b9-kube-api-access-brz4x\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869107 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-os-release\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869124 4757 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-etc-kubernetes\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869141 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0010c888-d5ad-4b2b-8309-1647fdf0dee3-proxy-tls\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869160 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/62d4cce6-0583-40a6-b7ea-2996d07b49b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869177 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf4fb\" (UniqueName: \"kubernetes.io/projected/9144d9fd-70d7-4a29-8e6b-c020c611980a-kube-api-access-mf4fb\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869203 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869219 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvf5t\" (UniqueName: \"kubernetes.io/projected/0010c888-d5ad-4b2b-8309-1647fdf0dee3-kube-api-access-kvf5t\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869235 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-cni-dir\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869250 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-system-cni-dir\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869264 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-cnibin\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869260 4757 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-etc-kubernetes\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870049 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-system-cni-dir\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870074 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0010c888-d5ad-4b2b-8309-1647fdf0dee3-mcd-auth-proxy-config\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870087 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/62d4cce6-0583-40a6-b7ea-2996d07b49b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870135 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-cnibin\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.869280 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0010c888-d5ad-4b2b-8309-1647fdf0dee3-mcd-auth-proxy-config\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870232 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-os-release\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.870245 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.870261 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.870274 4757 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:52 crc 
kubenswrapper[4757]: I1006 13:38:52.870290 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-cnibin\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.870311 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:53.870298415 +0000 UTC m=+22.367616952 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870256 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-cni-dir\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870331 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-daemon-config\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870418 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-var-lib-cni-multus\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870450 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0010c888-d5ad-4b2b-8309-1647fdf0dee3-rootfs\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870478 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9144d9fd-70d7-4a29-8e6b-c020c611980a-cni-binary-copy\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870523 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-run-netns\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870550 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870571 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-system-cni-dir\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870591 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-var-lib-kubelet\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870608 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-hostroot\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870633 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/62d4cce6-0583-40a6-b7ea-2996d07b49b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870678 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-var-lib-cni-multus\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870690 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-var-lib-cni-bin\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870718 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-run-multus-certs\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870740 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-var-lib-kubelet\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870744 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-run-k8s-cni-cncf-io\") pod \"multus-9qf7z\" (UID: 
\"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870766 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-run-netns\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870771 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-conf-dir\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870795 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-os-release\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870815 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-socket-dir-parent\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870842 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.870988 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.871011 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.871025 4757 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871054 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-system-cni-dir\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: E1006 13:38:52.871070 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-10-06 13:38:53.871056356 +0000 UTC m=+22.368374963 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871126 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-hostroot\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871131 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-run-k8s-cni-cncf-io\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871168 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-conf-dir\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871225 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-os-release\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870989 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-daemon-config\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871227 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/9144d9fd-70d7-4a29-8e6b-c020c611980a-cni-binary-copy\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871024 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-cnibin\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871283 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-multus-socket-dir-parent\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.870718 4757 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0010c888-d5ad-4b2b-8309-1647fdf0dee3-rootfs\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871298 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-run-multus-certs\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871317 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9144d9fd-70d7-4a29-8e6b-c020c611980a-host-var-lib-cni-bin\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.871542 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/62d4cce6-0583-40a6-b7ea-2996d07b49b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.872433 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/62d4cce6-0583-40a6-b7ea-2996d07b49b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.875483 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0010c888-d5ad-4b2b-8309-1647fdf0dee3-proxy-tls\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.885123 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf4fb\" (UniqueName: \"kubernetes.io/projected/9144d9fd-70d7-4a29-8e6b-c020c611980a-kube-api-access-mf4fb\") pod \"multus-9qf7z\" (UID: \"9144d9fd-70d7-4a29-8e6b-c020c611980a\") " pod="openshift-multus/multus-9qf7z" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.885753 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brz4x\" (UniqueName: \"kubernetes.io/projected/62d4cce6-0583-40a6-b7ea-2996d07b49b9-kube-api-access-brz4x\") pod \"multus-additional-cni-plugins-rhrzr\" (UID: \"62d4cce6-0583-40a6-b7ea-2996d07b49b9\") " pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.886433 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvf5t\" (UniqueName: \"kubernetes.io/projected/0010c888-d5ad-4b2b-8309-1647fdf0dee3-kube-api-access-kvf5t\") pod \"machine-config-daemon-7tb7h\" (UID: \"0010c888-d5ad-4b2b-8309-1647fdf0dee3\") " pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:52 crc kubenswrapper[4757]: I1006 13:38:52.993738 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.000404 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-58bhb"] Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.001207 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.003296 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.003431 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.003621 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.003667 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.004394 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.004805 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.005958 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.013806 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.025752 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-9qf7z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.027387 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.032004 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.046036 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:53 crc kubenswrapper[4757]: W1006 13:38:53.046111 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62d4cce6_0583_40a6_b7ea_2996d07b49b9.slice/crio-cca246bca508f7fad44c78b78b0d6bd3b8fbbbcf983d953f2f1b303f3f4c1a21 WatchSource:0}: Error finding container cca246bca508f7fad44c78b78b0d6bd3b8fbbbcf983d953f2f1b303f3f4c1a21: Status 404 returned error can't find the container with id cca246bca508f7fad44c78b78b0d6bd3b8fbbbcf983d953f2f1b303f3f4c1a21 Oct 06 13:38:53 crc kubenswrapper[4757]: W1006 13:38:53.047595 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9144d9fd_70d7_4a29_8e6b_c020c611980a.slice/crio-abeedf312dd30fafe4105b7347eb58c52239e76f9336d588117ea30245957298 WatchSource:0}: Error finding container abeedf312dd30fafe4105b7347eb58c52239e76f9336d588117ea30245957298: Status 404 returned error can't find the container with id abeedf312dd30fafe4105b7347eb58c52239e76f9336d588117ea30245957298 Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.055330 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072470 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-log-socket\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072521 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-script-lib\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072546 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovn-node-metrics-cert\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072568 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-ovn-kubernetes\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072601 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-slash\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072621 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-node-log\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072641 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-netd\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072666 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-systemd\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072684 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-env-overrides\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072713 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-kubelet\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072792 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-var-lib-openvswitch\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072826 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-ovn\") pod 
\"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072858 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-etc-openvswitch\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072882 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-netns\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072897 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-bin\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072915 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-config\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072931 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072947 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdf96\" (UniqueName: \"kubernetes.io/projected/a6624d05-e024-49f2-bf87-33e7ea4fccbb-kube-api-access-cdf96\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072983 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-systemd-units\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.072999 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-openvswitch\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.073251 4757 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.092907 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.122370 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.153620 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.172381 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176621 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-netns\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176673 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-bin\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176696 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-config\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176716 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-58bhb\" 
(UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176737 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdf96\" (UniqueName: \"kubernetes.io/projected/a6624d05-e024-49f2-bf87-33e7ea4fccbb-kube-api-access-cdf96\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176768 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-systemd-units\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176786 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-openvswitch\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176806 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-log-socket\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176827 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-script-lib\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176847 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovn-node-metrics-cert\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176875 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-slash\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176895 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-ovn-kubernetes\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176917 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-node-log\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176937 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-netd\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176956 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-env-overrides\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.176987 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-systemd\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.177016 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-kubelet\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.177049 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-var-lib-openvswitch\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.177076 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-ovn\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.177154 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-etc-openvswitch\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.177218 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-etc-openvswitch\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.177264 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-netns\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.177305 4757 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-bin\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.178108 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-config\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.178164 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.178424 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-systemd-units\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.178460 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-openvswitch\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.178485 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-log-socket\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.178749 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-netd\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179015 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-script-lib\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179451 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-env-overrides\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179503 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-systemd\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179536 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-kubelet\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179565 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-var-lib-openvswitch\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179593 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-ovn\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179626 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-ovn-kubernetes\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179655 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-slash\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179686 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-node-log\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.179781 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.179867 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.181910 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovn-node-metrics-cert\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.199454 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/mul
tus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.209998 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdf96\" (UniqueName: \"kubernetes.io/projected/a6624d05-e024-49f2-bf87-33e7ea4fccbb-kube-api-access-cdf96\") pod \"ovnkube-node-58bhb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") " pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.224428 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.236980 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.308324 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" event={"ID":"62d4cce6-0583-40a6-b7ea-2996d07b49b9","Type":"ContainerStarted","Data":"cca246bca508f7fad44c78b78b0d6bd3b8fbbbcf983d953f2f1b303f3f4c1a21"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.310016 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.310063 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.310072 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"32fe2bc1f61010a707c5f50e05f1412d083cc9b872aa9ce0fa7ebd800b634e6f"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.311587 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"eedd1843ac193d83a124266496aee5dadec33e62194e6d63c42d534745ee7005"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.312709 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-9j5jn" event={"ID":"de18b9fe-e396-469e-a6f6-d87ce91f3270","Type":"ContainerStarted","Data":"e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.312735 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-9j5jn" event={"ID":"de18b9fe-e396-469e-a6f6-d87ce91f3270","Type":"ContainerStarted","Data":"c859200db77ee0d216e9c93cd14a8d1bddf4fd62c0bdeb3ab849cb6773c4bb32"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.313268 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.314773 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.314802 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.314817 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4147f06212d2cf5be1098fb01d365f038dfa7d44d105906c44ffff1ca3bab26b"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.315955 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9qf7z" event={"ID":"9144d9fd-70d7-4a29-8e6b-c020c611980a","Type":"ContainerStarted","Data":"7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.315981 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9qf7z" event={"ID":"9144d9fd-70d7-4a29-8e6b-c020c611980a","Type":"ContainerStarted","Data":"abeedf312dd30fafe4105b7347eb58c52239e76f9336d588117ea30245957298"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.317427 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.317449 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f45ab46176a2bc01839a07eb7f02b2b40fef7f9d88585db662e8041ccaca62bb"} Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.326225 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.337233 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: W1006 13:38:53.342662 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6624d05_e024_49f2_bf87_33e7ea4fccbb.slice/crio-59e148336d62b9718667dd01888afdba55a69d24b59073039269c12f6f92ce5f WatchSource:0}: Error finding container 59e148336d62b9718667dd01888afdba55a69d24b59073039269c12f6f92ce5f: Status 404 returned error can't find the container with id 59e148336d62b9718667dd01888afdba55a69d24b59073039269c12f6f92ce5f Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.354015 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.370528 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.383428 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.395763 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.426609 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.456559 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.466031 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.469926 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.494200 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.516451 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.555319 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.598244 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.640217 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.676763 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.718908 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.759224 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.783197 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.783363 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.783419 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.783550 4757 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.783575 4757 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.783620 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-06 13:38:55.783603291 +0000 UTC m=+24.280921838 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.783664 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:55.783643283 +0000 UTC m=+24.280962000 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.783734 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:38:55.783718826 +0000 UTC m=+24.281037463 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.794638 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.836878 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.880160 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.884555 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.884620 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.884813 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.884819 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.884873 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.884888 4757 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.884837 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.884978 4757 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:53 crc kubenswrapper[4757]: 
E1006 13:38:53.884950 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:55.884933336 +0000 UTC m=+24.382251873 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:53 crc kubenswrapper[4757]: E1006 13:38:53.885066 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:55.885047161 +0000 UTC m=+24.382365698 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.923905 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:53 crc kubenswrapper[4757]: I1006 13:38:53.958734 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.000465 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:53Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.039027 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.076410 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.106258 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-j889t"] Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.106600 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.117559 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.128975 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.149766 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.169582 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.179519 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.179519 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:54 crc kubenswrapper[4757]: E1006 13:38:54.179655 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:38:54 crc kubenswrapper[4757]: E1006 13:38:54.179729 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.185002 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.185985 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.187511 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.187790 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a097c3c1-7cfd-4ba8-878d-40b971843e92-host\") pod \"node-ca-j889t\" (UID: \"a097c3c1-7cfd-4ba8-878d-40b971843e92\") " pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.187853 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29wtc\" (UniqueName: \"kubernetes.io/projected/a097c3c1-7cfd-4ba8-878d-40b971843e92-kube-api-access-29wtc\") pod \"node-ca-j889t\" (UID: \"a097c3c1-7cfd-4ba8-878d-40b971843e92\") " pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.187893 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a097c3c1-7cfd-4ba8-878d-40b971843e92-serviceca\") pod \"node-ca-j889t\" (UID: \"a097c3c1-7cfd-4ba8-878d-40b971843e92\") " pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.188359 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.188451 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.189748 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.190421 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.191059 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.192642 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.193440 4757 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.194616 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.195288 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.196589 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.197145 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.197766 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.198975 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.200018 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.203782 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.204399 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.205712 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.207148 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.207781 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.209122 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.209633 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.211179 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.211808 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.212583 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.214070 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.214694 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.215937 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.216599 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.217887 4757 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.218028 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.222471 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.224231 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.225051 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.227370 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.228478 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.229770 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.230811 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.231911 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.232507 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.233605 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.234625 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.235649 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.236331 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.237483 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.238062 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.238835 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.239364 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.239406 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.239861 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.240341 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.240875 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.241522 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.242126 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.276077 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.288709 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29wtc\" (UniqueName: \"kubernetes.io/projected/a097c3c1-7cfd-4ba8-878d-40b971843e92-kube-api-access-29wtc\") pod \"node-ca-j889t\" (UID: \"a097c3c1-7cfd-4ba8-878d-40b971843e92\") " pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.288806 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a097c3c1-7cfd-4ba8-878d-40b971843e92-serviceca\") pod \"node-ca-j889t\" (UID: \"a097c3c1-7cfd-4ba8-878d-40b971843e92\") " pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.288865 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a097c3c1-7cfd-4ba8-878d-40b971843e92-host\") pod \"node-ca-j889t\" (UID: \"a097c3c1-7cfd-4ba8-878d-40b971843e92\") " pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.288941 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a097c3c1-7cfd-4ba8-878d-40b971843e92-host\") pod \"node-ca-j889t\" (UID: \"a097c3c1-7cfd-4ba8-878d-40b971843e92\") " pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.290355 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a097c3c1-7cfd-4ba8-878d-40b971843e92-serviceca\") pod \"node-ca-j889t\" (UID: \"a097c3c1-7cfd-4ba8-878d-40b971843e92\") " pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.324357 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936" exitCode=0 Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.324509 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"} Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.324605 4757 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"59e148336d62b9718667dd01888afdba55a69d24b59073039269c12f6f92ce5f"} Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.327536 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29wtc\" (UniqueName: \"kubernetes.io/projected/a097c3c1-7cfd-4ba8-878d-40b971843e92-kube-api-access-29wtc\") pod \"node-ca-j889t\" (UID: \"a097c3c1-7cfd-4ba8-878d-40b971843e92\") " pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.336682 4757 generic.go:334] "Generic (PLEG): container finished" podID="62d4cce6-0583-40a6-b7ea-2996d07b49b9" containerID="a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55" exitCode=0 Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.336825 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" event={"ID":"62d4cce6-0583-40a6-b7ea-2996d07b49b9","Type":"ContainerDied","Data":"a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55"} Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.351878 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: E1006 13:38:54.354590 4757 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.405439 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.450883 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.476540 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.518706 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.556969 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.604061 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.625218 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-j889t" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.636732 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: W1006 13:38:54.643630 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda097c3c1_7cfd_4ba8_878d_40b971843e92.slice/crio-96df2794db597f3b33b250eccacd7c04dc246d7d97cfc558370b3d68bfce347b WatchSource:0}: Error finding container 96df2794db597f3b33b250eccacd7c04dc246d7d97cfc558370b3d68bfce347b: Status 404 returned error can't find 
the container with id 96df2794db597f3b33b250eccacd7c04dc246d7d97cfc558370b3d68bfce347b Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.676351 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.724065 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.757309 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.798590 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b15
4edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.842128 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.880816 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.918553 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc 
kubenswrapper[4757]: I1006 13:38:54.955023 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:54 crc kubenswrapper[4757]: I1006 13:38:54.999177 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:54Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.040538 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.079017 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b15
4edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.118006 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.158245 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.179463 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.179559 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.196694 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.245776 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z 
is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.275571 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.317338 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.343201 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"} Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.343521 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"} Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.343530 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"} Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.343539 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"} Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.343549 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"} Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.343558 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"} Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.344956 4757 generic.go:334] "Generic (PLEG): container finished" podID="62d4cce6-0583-40a6-b7ea-2996d07b49b9" containerID="d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75" exitCode=0 Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.345262 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" event={"ID":"62d4cce6-0583-40a6-b7ea-2996d07b49b9","Type":"ContainerDied","Data":"d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75"} Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.347113 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-j889t" event={"ID":"a097c3c1-7cfd-4ba8-878d-40b971843e92","Type":"ContainerStarted","Data":"2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c"} Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.347141 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-j889t" event={"ID":"a097c3c1-7cfd-4ba8-878d-40b971843e92","Type":"ContainerStarted","Data":"96df2794db597f3b33b250eccacd7c04dc246d7d97cfc558370b3d68bfce347b"} Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.358987 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.400078 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.438508 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.476352 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.515136 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.553862 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.599371 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.635641 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.691201 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z 
is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.725265 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.765467 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.799745 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.809306 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.809472 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:38:59.809454343 +0000 UTC m=+28.306772880 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.809582 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.809655 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.809744 4757 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.809812 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:59.809796037 +0000 UTC m=+28.307114574 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.809749 4757 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.809850 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:59.809843589 +0000 UTC m=+28.307162126 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.836236 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTi
me\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.879566 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\"
:\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.910417 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.910489 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.910685 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.910711 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.910725 4757 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.910748 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.910788 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.910806 4757 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.910790 4757 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:59.910772717 +0000 UTC m=+28.408091264 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:55 crc kubenswrapper[4757]: E1006 13:38:55.910892 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-06 13:38:59.910873341 +0000 UTC m=+28.408191888 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.924725 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:55 crc kubenswrapper[4757]: I1006 13:38:55.961856 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:55Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.179125 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.179148 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:38:56 crc kubenswrapper[4757]: E1006 13:38:56.179436 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:38:56 crc kubenswrapper[4757]: E1006 13:38:56.179267 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.353412 4757 generic.go:334] "Generic (PLEG): container finished" podID="62d4cce6-0583-40a6-b7ea-2996d07b49b9" containerID="7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242" exitCode=0 Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.353516 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" event={"ID":"62d4cce6-0583-40a6-b7ea-2996d07b49b9","Type":"ContainerDied","Data":"7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242"} Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.356141 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f"} Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.379343 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.400082 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.429476 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.451279 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.465338 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.479016 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.495465 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.531626 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.557485 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.571150 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.584051 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.595933 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.610164 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni
-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.627037 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.644264 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.658747 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.672772 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.687849 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.718414 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni
-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.759406 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.798435 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 
2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.840513 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.878721 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.918178 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:56 crc kubenswrapper[4757]: I1006 13:38:56.956864 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:56Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.002912 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.035123 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.095144 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z 
is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.179878 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:57 crc kubenswrapper[4757]: E1006 13:38:57.180078 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.363333 4757 generic.go:334] "Generic (PLEG): container finished" podID="62d4cce6-0583-40a6-b7ea-2996d07b49b9" containerID="2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a" exitCode=0 Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.363459 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" event={"ID":"62d4cce6-0583-40a6-b7ea-2996d07b49b9","Type":"ContainerDied","Data":"2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a"} Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.383286 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.396072 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.415166 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.436934 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.461763 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z 
is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.477617 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.501596 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.515920 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.527046 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.542168 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni
-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.559141 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.572883 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.595338 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:57 crc kubenswrapper[4757]: I1006 13:38:57.637041 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:57Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.009575 4757 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.012598 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.012638 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.012650 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.012777 4757 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 06 
13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.020835 4757 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.021196 4757 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.022994 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.023054 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.023075 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.023122 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.023139 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:58 crc kubenswrapper[4757]: E1006 13:38:58.041919 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 
2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.045431 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.045492 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.045508 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.045527 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.045540 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: E1006 13:38:58.123498 4757 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.125185 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.125213 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.125224 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.125237 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.125247 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.182168 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:38:58 crc kubenswrapper[4757]: E1006 13:38:58.182292 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.183376 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:58 crc kubenswrapper[4757]: E1006 13:38:58.183482 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.227558 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.227593 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.227604 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.227618 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.227629 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.330045 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.330153 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.330195 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.330212 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.330223 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.368548 4757 generic.go:334] "Generic (PLEG): container finished" podID="62d4cce6-0583-40a6-b7ea-2996d07b49b9" containerID="b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352" exitCode=0 Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.368608 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" event={"ID":"62d4cce6-0583-40a6-b7ea-2996d07b49b9","Type":"ContainerDied","Data":"b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.382355 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-conf
ig\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.384845 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.398938 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.411206 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.427672 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.436654 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.436697 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.436709 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.436726 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.436737 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.443952 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.467937 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.486858 
4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.503027 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.516605 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.539307 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z 
is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.540207 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.540643 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.540690 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.540710 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.540745 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.551964 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.566286 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.580592 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.596006 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:58Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.642673 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.642715 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.642725 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.642741 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.642752 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.744960 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.744987 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.744996 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.745009 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.745018 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.847185 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.847242 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.847265 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.847293 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.847314 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.950686 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.950748 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.950769 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.950795 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:58 crc kubenswrapper[4757]: I1006 13:38:58.950817 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:58Z","lastTransitionTime":"2025-10-06T13:38:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.054313 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.054388 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.054411 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.054441 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.054461 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.157208 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.157266 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.157288 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.157327 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.157349 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.179727 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.179923 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.260797 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.260875 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.260897 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.260929 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.260958 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.363330 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.363649 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.363661 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.363677 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.363689 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.391238 4757 generic.go:334] "Generic (PLEG): container finished" podID="62d4cce6-0583-40a6-b7ea-2996d07b49b9" containerID="d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82" exitCode=0 Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.391281 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" event={"ID":"62d4cce6-0583-40a6-b7ea-2996d07b49b9","Type":"ContainerDied","Data":"d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.407983 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.418708 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.434823 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.450529 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.466249 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.466284 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.466295 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.466311 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.466321 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.470876 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30
ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.486707 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.499431 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.513855 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.524258 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.538812 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni
-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.553751 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.564407 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.569327 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.569371 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.569384 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.569401 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.569414 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.576890 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.590615 4757 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:38:59Z is after 2025-08-24T17:21:41Z" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.672432 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.672476 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.672488 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.672539 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.672553 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.775769 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.775796 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.775805 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.775818 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.775829 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.854116 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.854282 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:39:07.854247185 +0000 UTC m=+36.351565732 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.854378 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.854426 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.854566 4757 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.854621 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:07.85461343 +0000 UTC m=+36.351931967 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.854566 4757 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.854662 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:07.854655462 +0000 UTC m=+36.351973999 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.877870 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.877916 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.877926 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.877944 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.877956 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.955772 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.955817 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.955955 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.955971 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.955982 4757 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.956033 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-10-06 13:39:07.956020908 +0000 UTC m=+36.453339445 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.956158 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.956232 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.956266 4757 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:59 crc kubenswrapper[4757]: E1006 13:38:59.956387 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:07.956341972 +0000 UTC m=+36.453660549 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.981083 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.981207 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.981222 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.981243 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:38:59 crc kubenswrapper[4757]: I1006 13:38:59.981256 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:38:59Z","lastTransitionTime":"2025-10-06T13:38:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.085025 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.085108 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.085120 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.085141 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.085156 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:00Z","lastTransitionTime":"2025-10-06T13:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.179746 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.179858 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:00 crc kubenswrapper[4757]: E1006 13:39:00.179964 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:00 crc kubenswrapper[4757]: E1006 13:39:00.180031 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.187822 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.187878 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.187897 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.187918 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.187937 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:00Z","lastTransitionTime":"2025-10-06T13:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.291440 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.291878 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.291893 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.291912 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.291924 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:00Z","lastTransitionTime":"2025-10-06T13:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.394676 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.394719 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.394730 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.394746 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.394759 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:00Z","lastTransitionTime":"2025-10-06T13:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.402215 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.402684 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.406647 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" event={"ID":"62d4cce6-0583-40a6-b7ea-2996d07b49b9","Type":"ContainerStarted","Data":"886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.420731 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.438765 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.442007 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.444196 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.460256 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.483381 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.498284 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.498347 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.498364 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.498390 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.498408 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:00Z","lastTransitionTime":"2025-10-06T13:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.516223 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":
\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.538456 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.556307 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.570482 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.583618 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.597680 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni
-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.600427 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.600481 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.600500 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.600524 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.600547 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:00Z","lastTransitionTime":"2025-10-06T13:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.612447 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90cc
f1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.624798 4757 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.642160 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.655935 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.657206 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.670232 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 
13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.682917 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.695737 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.703598 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.703673 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.703693 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.703712 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.703725 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:00Z","lastTransitionTime":"2025-10-06T13:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.708480 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.723327 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.740404 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.752433 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.767006 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.779990 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.793393 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.804250 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.806475 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.806558 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.806575 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.806595 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.806610 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:00Z","lastTransitionTime":"2025-10-06T13:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.820279 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.832076 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.850325 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d
7f424ce8b7ad45fe99ad0b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.862766 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.876209 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.901059 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready 
status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8
beb82873d91c5d7f424ce8b7ad45fe99ad0b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.908835 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.908893 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.908910 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.908934 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.908952 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:00Z","lastTransitionTime":"2025-10-06T13:39:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.914069 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.926375 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.938569 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.952311 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.972050 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:00 crc kubenswrapper[4757]: I1006 13:39:00.989036 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:00Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.006409 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.011403 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.011613 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.011762 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.011914 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.012047 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.025550 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.040148 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.060073 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.084901 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d58
89f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.115478 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.115544 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.115567 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.115594 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.115613 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.180211 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:01 crc kubenswrapper[4757]: E1006 13:39:01.180446 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.218697 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.218759 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.218780 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.218805 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.218825 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.321839 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.321889 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.321906 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.321929 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.321946 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.411381 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.424249 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.424298 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.424312 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.424330 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.424344 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.444179 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.459759 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc 
kubenswrapper[4757]: I1006 13:39:01.478615 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.495940 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.510420 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.526409 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.527552 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.527814 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.528060 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.528362 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 
13:39:01.528581 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.544172 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.572854 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d
7f424ce8b7ad45fe99ad0b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.596010 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.617875 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.635288 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.635355 4757 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.635374 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.635414 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.636568 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.636962 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/s
tatic-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.652872 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.670077 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.694614 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d58
89f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.711458 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:01Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.739585 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.739636 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.739651 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.739670 4757 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.739686 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.843134 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.843184 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.843196 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.843212 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.843223 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.945713 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.945747 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.945761 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.945777 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:01 crc kubenswrapper[4757]: I1006 13:39:01.945787 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:01Z","lastTransitionTime":"2025-10-06T13:39:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.048469 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.048497 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.048506 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.048519 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.048529 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.150793 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.150847 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.150873 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.150894 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.150909 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.179504 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.179550 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:02 crc kubenswrapper[4757]: E1006 13:39:02.179643 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:02 crc kubenswrapper[4757]: E1006 13:39:02.179782 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.193070 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.204670 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.219145 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.227833 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.239568 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.250461 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z"
Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.252948 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.252978 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.252986 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.253019 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.253029 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.266591 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.288623 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.302983 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.315494 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.328945 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.342504 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d58
89f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.352754 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.355063 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.355149 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.355167 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.355192 4757 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.355211 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.366706 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",
\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:02Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.458539 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.458609 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.458619 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.458634 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.458643 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.561994 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.562083 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.562140 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.562166 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.562183 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.665391 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.665460 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.665478 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.665502 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.665519 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.769082 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.769173 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.769189 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.769213 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.769230 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.873619 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.873680 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.873700 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.873724 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.873740 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.976866 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.976923 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.976938 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.976961 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:02 crc kubenswrapper[4757]: I1006 13:39:02.976978 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:02Z","lastTransitionTime":"2025-10-06T13:39:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.080530 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.080610 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.080647 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.080679 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.080702 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:03Z","lastTransitionTime":"2025-10-06T13:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.179393 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:03 crc kubenswrapper[4757]: E1006 13:39:03.179697 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.184200 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.184257 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.184275 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.184302 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.184320 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:03Z","lastTransitionTime":"2025-10-06T13:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.287570 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.287624 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.287642 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.287666 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.287682 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:03Z","lastTransitionTime":"2025-10-06T13:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.390843 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.390900 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.390917 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.390941 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.390962 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:03Z","lastTransitionTime":"2025-10-06T13:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.420268 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/0.log" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.424813 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63" exitCode=1 Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.424866 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.426033 4757 scope.go:117] "RemoveContainer" containerID="a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.449791 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.472456 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.495541 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.495621 4757 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.495645 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.495675 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.495705 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:03Z","lastTransitionTime":"2025-10-06T13:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.495558 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/s
tatic-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.517839 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.539738 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.568337 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d58
89f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.583803 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.598547 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.598665 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.598691 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.598722 4757 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.598745 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:03Z","lastTransitionTime":"2025-10-06T13:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.603245 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.622374 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.638630 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.655693 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.672647 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.692031 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.701538 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.701587 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.701600 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.701619 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.701631 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:03Z","lastTransitionTime":"2025-10-06T13:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.715749 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:02Z\\\",\\\"message\\\":\\\"\\\\nI1006 13:39:02.545457 6059 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:02.545505 6059 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:02.545542 6059 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1006 13:39:02.545546 6059 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:02.545561 6059 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:02.545577 6059 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:02.545658 6059 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1006 13:39:02.545707 6059 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1006 13:39:02.545746 6059 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:02.545783 6059 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1006 13:39:02.545830 6059 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:02.545865 6059 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1006 13:39:02.545954 6059 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:02.545975 6059 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:02.546161 6059 factory.go:656] Stopping watch factory\\\\nI1006 13:39:02.546237 6059 ovnkube.go:599] Stopped ovnkube\\\\nI1006 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:03Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.804855 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.804924 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.804950 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.804982 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.805006 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:03Z","lastTransitionTime":"2025-10-06T13:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.915303 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.915419 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.915443 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.915469 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:03 crc kubenswrapper[4757]: I1006 13:39:03.915516 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:03Z","lastTransitionTime":"2025-10-06T13:39:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.017954 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.018018 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.018035 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.018060 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.018079 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.121005 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.121047 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.121057 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.121081 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.121111 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.179177 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.179238 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:04 crc kubenswrapper[4757]: E1006 13:39:04.179319 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:04 crc kubenswrapper[4757]: E1006 13:39:04.179392 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.223903 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.223976 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.223999 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.224028 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.224043 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.327613 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.327658 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.327677 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.327695 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.327708 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.430059 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.430819 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.430850 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.430869 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.430881 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.432991 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/0.log" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.436380 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.436899 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.459141 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.475439 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.492331 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.507965 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.522000 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.533129 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.533706 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.533947 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.534063 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.534169 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.539377 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.566448 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:02Z\\\",\\\"message\\\":\\\"\\\\nI1006 13:39:02.545457 6059 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:02.545505 6059 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:02.545542 6059 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1006 13:39:02.545546 6059 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:02.545561 6059 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:02.545577 6059 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:02.545658 6059 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1006 13:39:02.545707 6059 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1006 13:39:02.545746 6059 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:02.545783 6059 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1006 13:39:02.545830 6059 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:02.545865 6059 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1006 13:39:02.545954 6059 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:02.545975 6059 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:02.546161 6059 factory.go:656] Stopping watch factory\\\\nI1006 13:39:02.546237 6059 ovnkube.go:599] Stopped ovnkube\\\\nI1006 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.585918 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.601948 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.620409 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.636990 4757 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.637049 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.637063 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.637082 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.637109 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.640117 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.659122 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",
\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8
ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.677029 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\"
:\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.697242 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:04Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.757703 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.758267 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.758279 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.758334 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.758347 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.861329 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.861370 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.861380 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.861408 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.861420 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.964682 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.964745 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.964759 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.964784 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:04 crc kubenswrapper[4757]: I1006 13:39:04.964797 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:04Z","lastTransitionTime":"2025-10-06T13:39:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.068560 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.068631 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.068646 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.068675 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.068692 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:05Z","lastTransitionTime":"2025-10-06T13:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.170804 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.170873 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.170891 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.170915 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.170933 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:05Z","lastTransitionTime":"2025-10-06T13:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.179237 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:05 crc kubenswrapper[4757]: E1006 13:39:05.179464 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.273836 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.273881 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.273894 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.273912 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.273924 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:05Z","lastTransitionTime":"2025-10-06T13:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.377269 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.377332 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.377357 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.377393 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.377419 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:05Z","lastTransitionTime":"2025-10-06T13:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.444424 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/1.log" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.445641 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/0.log" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.450346 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64" exitCode=1 Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.450391 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.450436 4757 scope.go:117] "RemoveContainer" containerID="a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.451831 4757 scope.go:117] "RemoveContainer" containerID="81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64" Oct 06 13:39:05 crc kubenswrapper[4757]: E1006 13:39:05.452250 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.474584 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.483007 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.483070 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.483088 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.483119 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.483164 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:05Z","lastTransitionTime":"2025-10-06T13:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.494894 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.502627 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj"] Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.503625 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.505909 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.506854 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.519304 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\
\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.540682 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\
":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\
\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"
restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.553037 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.572832 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.588418 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.588537 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.588588 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.588607 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.588630 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.588651 4757 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:05Z","lastTransitionTime":"2025-10-06T13:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.608822 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.616028 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5csnl\" (UniqueName: \"kubernetes.io/projected/34af6a7e-1458-4f7c-bc54-69fb80966b6b-kube-api-access-5csnl\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.616149 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/34af6a7e-1458-4f7c-bc54-69fb80966b6b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.616194 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/34af6a7e-1458-4f7c-bc54-69fb80966b6b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.616244 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/34af6a7e-1458-4f7c-bc54-69fb80966b6b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.626075 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.643340 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.657976 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.680314 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217
cf2f674170f00aaeeb811f64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:02Z\\\",\\\"message\\\":\\\"\\\\nI1006 13:39:02.545457 6059 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:02.545505 6059 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:02.545542 6059 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1006 13:39:02.545546 6059 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:02.545561 6059 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:02.545577 6059 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:02.545658 6059 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1006 13:39:02.545707 6059 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1006 13:39:02.545746 6059 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:02.545783 6059 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1006 13:39:02.545830 6059 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:02.545865 6059 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1006 13:39:02.545954 6059 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:02.545975 6059 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:02.546161 6059 factory.go:656] Stopping watch factory\\\\nI1006 13:39:02.546237 6059 ovnkube.go:599] Stopped ovnkube\\\\nI1006 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:04Z\\\",\\\"message\\\":\\\"\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1006 13:39:04.366323 6196 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1006 13:39:04.366339 6196 services_controller.go:360] Finished syncing service console on namespace openshift-console for network=default : 2.436929ms\\\\nI1006 13:39:04.366598 6196 obj_retry.go:551] Creating *factory.egressNode crc took: 4.572966ms\\\\nI1006 13:39:04.366619 6196 factory.go:1336] Added *v1.Node event handler 7\\\\nI1006 13:39:04.366642 6196 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1006 13:39:04.366868 6196 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1006 13:39:04.366989 6196 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1006 13:39:04.367026 6196 ovnkube.go:599] Stopped ovnkube\\\\nI1006 13:39:04.367053 6196 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1006 13:39:04.367182 6196 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add 
event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotatio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.1
68.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.691738 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.691813 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.691836 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.691860 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.691878 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:05Z","lastTransitionTime":"2025-10-06T13:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.694840 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.709517 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.717667 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/34af6a7e-1458-4f7c-bc54-69fb80966b6b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.717808 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/34af6a7e-1458-4f7c-bc54-69fb80966b6b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.717868 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/34af6a7e-1458-4f7c-bc54-69fb80966b6b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.717932 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-5csnl\" (UniqueName: \"kubernetes.io/projected/34af6a7e-1458-4f7c-bc54-69fb80966b6b-kube-api-access-5csnl\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.718646 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/34af6a7e-1458-4f7c-bc54-69fb80966b6b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.718675 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/34af6a7e-1458-4f7c-bc54-69fb80966b6b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.725685 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/34af6a7e-1458-4f7c-bc54-69fb80966b6b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.728244 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containe
rID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.738726 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5csnl\" (UniqueName: \"kubernetes.io/projected/34af6a7e-1458-4f7c-bc54-69fb80966b6b-kube-api-access-5csnl\") pod \"ovnkube-control-plane-749d76644c-l2rjj\" (UID: \"34af6a7e-1458-4f7c-bc54-69fb80966b6b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.744981 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.755201 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.765670 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.780096 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.790817 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.794324 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.794437 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.794494 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.794584 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.794647 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:05Z","lastTransitionTime":"2025-10-06T13:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.804552 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.822630 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.826321 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.841334 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.858468 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.877278 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:02Z\\\",\\\"message\\\":\\\"\\\\nI1006 13:39:02.545457 6059 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:02.545505 6059 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:02.545542 6059 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1006 13:39:02.545546 6059 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:02.545561 6059 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:02.545577 6059 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:02.545658 6059 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1006 13:39:02.545707 6059 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1006 13:39:02.545746 6059 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:02.545783 6059 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1006 13:39:02.545830 6059 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:02.545865 6059 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1006 13:39:02.545954 6059 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:02.545975 6059 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:02.546161 6059 factory.go:656] Stopping watch factory\\\\nI1006 13:39:02.546237 6059 ovnkube.go:599] Stopped ovnkube\\\\nI1006 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:04Z\\\",\\\"message\\\":\\\"\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1006 13:39:04.366323 6196 loadbalancer.go:304] Deleted 0 stale LBs for 
map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1006 13:39:04.366339 6196 services_controller.go:360] Finished syncing service console on namespace openshift-console for network=default : 2.436929ms\\\\nI1006 13:39:04.366598 6196 obj_retry.go:551] Creating *factory.egressNode crc took: 4.572966ms\\\\nI1006 13:39:04.366619 6196 factory.go:1336] Added *v1.Node event handler 7\\\\nI1006 13:39:04.366642 6196 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1006 13:39:04.366868 6196 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1006 13:39:04.366989 6196 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1006 13:39:04.367026 6196 ovnkube.go:599] Stopped ovnkube\\\\nI1006 13:39:04.367053 6196 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1006 13:39:04.367182 6196 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotatio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.899326 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.899422 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.899436 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.899456 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.899469 4757 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:05Z","lastTransitionTime":"2025-10-06T13:39:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.901175 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.916077 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.934564 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:05 crc kubenswrapper[4757]: I1006 13:39:05.953946 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:05Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.002179 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.002217 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.002226 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.002241 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.002253 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.105343 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.105388 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.105400 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.105417 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.105431 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.180058 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.180135 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:06 crc kubenswrapper[4757]: E1006 13:39:06.180255 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:06 crc kubenswrapper[4757]: E1006 13:39:06.180373 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.207676 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.207726 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.207738 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.207758 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.207771 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.233748 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-sc9qx"] Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.234317 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:06 crc kubenswrapper[4757]: E1006 13:39:06.234384 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.259852 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c1
6d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.271829 4757 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.290910 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.308793 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.310214 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.310249 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.310260 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.310276 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.310288 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.323432 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.323520 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwpmf\" (UniqueName: \"kubernetes.io/projected/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-kube-api-access-rwpmf\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.323619 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\
"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.345269 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\
\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.370212 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.382701 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.395981 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.412306 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.412352 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.412362 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.412377 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.412388 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.422340 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1f6ccb718b501c86d747613e8beb82873d91c5d7f424ce8b7ad45fe99ad0b63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:02Z\\\",\\\"message\\\":\\\"\\\\nI1006 13:39:02.545457 6059 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:02.545505 6059 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:02.545542 6059 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1006 13:39:02.545546 6059 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:02.545561 6059 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:02.545577 6059 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:02.545658 6059 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1006 13:39:02.545707 6059 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1006 13:39:02.545746 6059 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:02.545783 6059 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1006 13:39:02.545830 6059 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:02.545865 6059 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1006 13:39:02.545954 6059 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:02.545975 6059 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:02.546161 6059 factory.go:656] Stopping watch factory\\\\nI1006 13:39:02.546237 6059 ovnkube.go:599] Stopped ovnkube\\\\nI1006 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:04Z\\\",\\\"message\\\":\\\"\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1006 13:39:04.366323 6196 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1006 13:39:04.366339 6196 services_controller.go:360] Finished syncing service console on namespace openshift-console for network=default : 2.436929ms\\\\nI1006 13:39:04.366598 6196 obj_retry.go:551] Creating *factory.egressNode crc took: 4.572966ms\\\\nI1006 13:39:04.366619 6196 factory.go:1336] Added *v1.Node event handler 7\\\\nI1006 13:39:04.366642 6196 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1006 13:39:04.366868 6196 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1006 13:39:04.366989 6196 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1006 13:39:04.367026 6196 ovnkube.go:599] Stopped ovnkube\\\\nI1006 13:39:04.367053 6196 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1006 13:39:04.367182 6196 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annotatio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.424431 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.424472 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwpmf\" (UniqueName: \"kubernetes.io/projected/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-kube-api-access-rwpmf\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:06 crc kubenswrapper[4757]: E1006 13:39:06.425079 4757 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:06 crc kubenswrapper[4757]: E1006 13:39:06.425221 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs podName:8a0a24d2-8946-4710-91f2-cc59ecedb5e3 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:06.925196955 +0000 UTC m=+35.422515502 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs") pod "network-metrics-daemon-sc9qx" (UID: "8a0a24d2-8946-4710-91f2-cc59ecedb5e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.433646 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 
2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.445390 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwpmf\" (UniqueName: \"kubernetes.io/projected/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-kube-api-access-rwpmf\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.446305 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.455558 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" event={"ID":"34af6a7e-1458-4f7c-bc54-69fb80966b6b","Type":"ContainerStarted","Data":"a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.455609 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" event={"ID":"34af6a7e-1458-4f7c-bc54-69fb80966b6b","Type":"ContainerStarted","Data":"53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.455621 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" event={"ID":"34af6a7e-1458-4f7c-bc54-69fb80966b6b","Type":"ContainerStarted","Data":"e2476cbe24b87bedc78f371bbf824608fc6fd3286a6e35b5155baa7a836a6f29"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.457044 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/1.log" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.460538 4757 scope.go:117] "RemoveContainer" containerID="81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64" Oct 06 13:39:06 crc kubenswrapper[4757]: E1006 13:39:06.460680 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.460964 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.472970 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.487905 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.501163 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.514365 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.515045 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.515157 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.515182 4757 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.515209 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.515226 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.531193 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.545865 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.566045 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217
cf2f674170f00aaeeb811f64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:04Z\\\",\\\"message\\\":\\\"\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1006 13:39:04.366323 6196 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1006 13:39:04.366339 6196 services_controller.go:360] Finished syncing service console on namespace openshift-console for network=default : 2.436929ms\\\\nI1006 13:39:04.366598 6196 obj_retry.go:551] Creating *factory.egressNode crc took: 4.572966ms\\\\nI1006 13:39:04.366619 6196 factory.go:1336] Added *v1.Node event handler 7\\\\nI1006 13:39:04.366642 6196 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1006 13:39:04.366868 6196 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1006 13:39:04.366989 6196 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1006 13:39:04.367026 6196 ovnkube.go:599] Stopped ovnkube\\\\nI1006 13:39:04.367053 6196 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1006 13:39:04.367182 6196 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotatio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.580276 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.594234 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.609601 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.618438 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.618473 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.618485 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.618502 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.618515 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.626976 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc 
kubenswrapper[4757]: I1006 13:39:06.646504 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\
\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.662515 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.686445 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.700082 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.714145 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.720842 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.720906 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.720923 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.720951 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.720968 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.731098 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.749938 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.763915 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:06Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.823892 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.823947 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.823964 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.823994 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.824017 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.927421 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.927497 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.927521 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.927546 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.927564 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:06Z","lastTransitionTime":"2025-10-06T13:39:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:06 crc kubenswrapper[4757]: I1006 13:39:06.931084 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:06 crc kubenswrapper[4757]: E1006 13:39:06.931264 4757 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:06 crc kubenswrapper[4757]: E1006 13:39:06.931324 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs podName:8a0a24d2-8946-4710-91f2-cc59ecedb5e3 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:07.931309867 +0000 UTC m=+36.428628404 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs") pod "network-metrics-daemon-sc9qx" (UID: "8a0a24d2-8946-4710-91f2-cc59ecedb5e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.030580 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.030667 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.030684 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.030713 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.030730 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.134102 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.134209 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.134228 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.134303 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.134323 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.180039 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:07 crc kubenswrapper[4757]: E1006 13:39:07.180259 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.238288 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.238333 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.238345 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.238361 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.238373 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.341409 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.341492 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.341518 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.341545 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.341565 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.444052 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.444487 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.444504 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.444526 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.444543 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.548230 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.548307 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.548331 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.548361 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.548383 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.651930 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.652005 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.652030 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.652059 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.652083 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.755582 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.755621 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.755654 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.755670 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.755681 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.858390 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.858439 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.858452 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.858467 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.858478 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.941493 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:39:07 crc kubenswrapper[4757]: E1006 13:39:07.941690 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:39:23.941653693 +0000 UTC m=+52.438972230 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.941874 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.941914 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.941962 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:07 crc kubenswrapper[4757]: E1006 13:39:07.942126 4757 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 06 13:39:07 crc kubenswrapper[4757]: E1006 13:39:07.942162 4757 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 06 13:39:07 crc kubenswrapper[4757]: E1006 13:39:07.942207 4757 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 06 13:39:07 crc kubenswrapper[4757]: E1006 13:39:07.942226 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:23.942199504 +0000 UTC m=+52.439518071 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 06 13:39:07 crc kubenswrapper[4757]: E1006 13:39:07.942304 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:23.942286368 +0000 UTC m=+52.439604985 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 06 13:39:07 crc kubenswrapper[4757]: E1006 13:39:07.942328 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs podName:8a0a24d2-8946-4710-91f2-cc59ecedb5e3 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:09.942318839 +0000 UTC m=+38.439637536 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs") pod "network-metrics-daemon-sc9qx" (UID: "8a0a24d2-8946-4710-91f2-cc59ecedb5e3") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.961065 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.961099 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.961107 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.961121 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:07 crc kubenswrapper[4757]: I1006 13:39:07.961145 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:07Z","lastTransitionTime":"2025-10-06T13:39:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.043648 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.043777 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.043926 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.043984 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.044006 4757 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.044075 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:24.044051641 +0000 UTC m=+52.541370208 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.043926 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.044143 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.044161 4757 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.044214 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:24.044199687 +0000 UTC m=+52.541518254 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.062880 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.062938 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.062947 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.062961 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.062992 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.165431 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.165512 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.165537 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.165568 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.165591 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.179558 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.179669 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.179551 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.179759 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.179554 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.179822 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.267886 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.267921 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.267931 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.267968 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.267978 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.370879 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.370939 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.370955 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.370977 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.370992 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.475621 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.475686 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.475705 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.475730 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.475754 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.509471 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.509502 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.509512 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.509529 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.509541 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.528794 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:08Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.533854 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.533916 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.533935 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.533960 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.533977 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.554502 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:08Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.559339 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.559380 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.559394 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.559417 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.559433 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.579328 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:08Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.583530 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.583571 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.583586 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.583606 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.583622 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.603494 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:08Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.608512 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.608719 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.608858 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.609017 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.609173 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.629358 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:08Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:08 crc kubenswrapper[4757]: E1006 13:39:08.629636 4757 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.632088 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.632219 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.632246 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.632277 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.632302 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.735978 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.736054 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.736073 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.736154 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.736178 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.839257 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.839315 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.839331 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.839356 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.839372 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.942361 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.942418 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.942435 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.942458 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:08 crc kubenswrapper[4757]: I1006 13:39:08.942476 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:08Z","lastTransitionTime":"2025-10-06T13:39:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.045473 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.045524 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.045541 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.045564 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.045580 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.149207 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.149688 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.149809 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.149912 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.149993 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.180041 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:09 crc kubenswrapper[4757]: E1006 13:39:09.180456 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.253685 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.253760 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.253782 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.253812 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.253834 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.357453 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.357844 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.357992 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.358180 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.358315 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.462032 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.462137 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.462158 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.462184 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.462202 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.569700 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.569840 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.569869 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.569899 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.569920 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.672725 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.672788 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.672807 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.672832 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.672850 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.776591 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.777043 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.777243 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.777447 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.777639 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.881232 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.881307 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.881331 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.881360 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.881381 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.967609 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:09 crc kubenswrapper[4757]: E1006 13:39:09.967818 4757 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:09 crc kubenswrapper[4757]: E1006 13:39:09.968316 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs podName:8a0a24d2-8946-4710-91f2-cc59ecedb5e3 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:13.968281875 +0000 UTC m=+42.465600452 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs") pod "network-metrics-daemon-sc9qx" (UID: "8a0a24d2-8946-4710-91f2-cc59ecedb5e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.984763 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.984838 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.984859 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.984885 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:09 crc kubenswrapper[4757]: I1006 13:39:09.984903 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:09Z","lastTransitionTime":"2025-10-06T13:39:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.088316 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.088402 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.088425 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.088448 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.088467 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:10Z","lastTransitionTime":"2025-10-06T13:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.179871 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.179986 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:10 crc kubenswrapper[4757]: E1006 13:39:10.180035 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.180108 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:10 crc kubenswrapper[4757]: E1006 13:39:10.180232 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:10 crc kubenswrapper[4757]: E1006 13:39:10.180337 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.191405 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.191450 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.191466 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.191487 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.191510 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:10Z","lastTransitionTime":"2025-10-06T13:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.295256 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.295318 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.295342 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.295373 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.295395 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:10Z","lastTransitionTime":"2025-10-06T13:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.398662 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.398727 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.398749 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.398775 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.398795 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:10Z","lastTransitionTime":"2025-10-06T13:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.502254 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.502364 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.502389 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.502418 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.502445 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:10Z","lastTransitionTime":"2025-10-06T13:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.605815 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.605873 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.605894 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.605918 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.605936 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:10Z","lastTransitionTime":"2025-10-06T13:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.708361 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.708424 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.708442 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.708469 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.708488 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:10Z","lastTransitionTime":"2025-10-06T13:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.811421 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.811466 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.811499 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.811519 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.811532 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:10Z","lastTransitionTime":"2025-10-06T13:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.914298 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.914359 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.914381 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.914411 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:10 crc kubenswrapper[4757]: I1006 13:39:10.914434 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:10Z","lastTransitionTime":"2025-10-06T13:39:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.017526 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.017591 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.017608 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.017635 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.017653 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.120787 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.120861 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.120876 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.120899 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.120913 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.179464 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:11 crc kubenswrapper[4757]: E1006 13:39:11.179705 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.224290 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.224371 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.224394 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.224426 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.224447 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.327600 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.327644 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.327656 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.327674 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.327686 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.430566 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.430630 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.430641 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.430654 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.430665 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.533377 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.533429 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.533446 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.533462 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.533472 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.638003 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.638066 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.638083 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.638125 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.638148 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.740466 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.740516 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.740524 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.740540 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.740550 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.843248 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.843298 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.843315 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.843343 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.843362 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.946609 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.946673 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.946697 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.946722 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:11 crc kubenswrapper[4757]: I1006 13:39:11.946741 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:11Z","lastTransitionTime":"2025-10-06T13:39:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.050439 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.050484 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.050502 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.050523 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.050539 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.152861 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.152916 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.152932 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.152954 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.152970 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.180030 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:12 crc kubenswrapper[4757]: E1006 13:39:12.180205 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.180602 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.180759 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:12 crc kubenswrapper[4757]: E1006 13:39:12.181137 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:12 crc kubenswrapper[4757]: E1006 13:39:12.180923 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.202318 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.220718 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.240389 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.257252 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.257350 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.257364 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.257549 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.257564 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.258268 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.278625 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.300885 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.322482 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217
cf2f674170f00aaeeb811f64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:04Z\\\",\\\"message\\\":\\\"\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1006 13:39:04.366323 6196 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1006 13:39:04.366339 6196 services_controller.go:360] Finished syncing service console on namespace openshift-console for network=default : 2.436929ms\\\\nI1006 13:39:04.366598 6196 obj_retry.go:551] Creating *factory.egressNode crc took: 4.572966ms\\\\nI1006 13:39:04.366619 6196 factory.go:1336] Added *v1.Node event handler 7\\\\nI1006 13:39:04.366642 6196 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1006 13:39:04.366868 6196 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1006 13:39:04.366989 6196 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1006 13:39:04.367026 6196 ovnkube.go:599] Stopped ovnkube\\\\nI1006 13:39:04.367053 6196 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1006 13:39:04.367182 6196 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotatio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.337556 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.351294 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.359983 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.360015 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.360031 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.360047 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.360057 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.370363 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.380639 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.400112 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.424155 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d58
89f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.438728 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.452858 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.462930 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.462973 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.462990 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.463010 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.463023 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.472741 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:12Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.565490 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.565562 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.565581 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.565607 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.565624 4757 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.668388 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.668447 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.668461 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.668478 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.668490 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.771185 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.771246 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.771264 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.771288 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.771305 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.873925 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.873996 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.874014 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.874039 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.874058 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.977809 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.977882 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.977899 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.977926 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:12 crc kubenswrapper[4757]: I1006 13:39:12.977943 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:12Z","lastTransitionTime":"2025-10-06T13:39:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.080882 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.080964 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.080986 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.081016 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.081038 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:13Z","lastTransitionTime":"2025-10-06T13:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.179638 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:13 crc kubenswrapper[4757]: E1006 13:39:13.179830 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.184371 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.184427 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.184445 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.184469 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.184487 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:13Z","lastTransitionTime":"2025-10-06T13:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.287064 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.287196 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.287222 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.287250 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.287271 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:13Z","lastTransitionTime":"2025-10-06T13:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.389857 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.389897 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.389906 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.389950 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.389959 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:13Z","lastTransitionTime":"2025-10-06T13:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.492422 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.492454 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.492465 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.492479 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.492489 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:13Z","lastTransitionTime":"2025-10-06T13:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.594980 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.595147 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.595169 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.595193 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.595210 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:13Z","lastTransitionTime":"2025-10-06T13:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.698393 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.698431 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.698442 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.698459 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.698473 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:13Z","lastTransitionTime":"2025-10-06T13:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.801573 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.801627 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.801645 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.801673 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.801697 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:13Z","lastTransitionTime":"2025-10-06T13:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.905138 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.905193 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.905204 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.905221 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:13 crc kubenswrapper[4757]: I1006 13:39:13.905232 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:13Z","lastTransitionTime":"2025-10-06T13:39:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.008287 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.008361 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.008379 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.008405 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.008422 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.010810 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:14 crc kubenswrapper[4757]: E1006 13:39:14.011008 4757 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:14 crc kubenswrapper[4757]: E1006 13:39:14.011062 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs podName:8a0a24d2-8946-4710-91f2-cc59ecedb5e3 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:22.011049385 +0000 UTC m=+50.508367922 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs") pod "network-metrics-daemon-sc9qx" (UID: "8a0a24d2-8946-4710-91f2-cc59ecedb5e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.111605 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.111671 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.111691 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.111715 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.111734 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.180270 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.180293 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.180349 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:14 crc kubenswrapper[4757]: E1006 13:39:14.180449 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:14 crc kubenswrapper[4757]: E1006 13:39:14.180585 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:14 crc kubenswrapper[4757]: E1006 13:39:14.180760 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.213927 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.214001 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.214055 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.214084 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.214133 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.316809 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.316871 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.316888 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.316907 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.316922 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.420694 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.420755 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.420772 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.420800 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.420818 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.524162 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.524238 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.524262 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.524294 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.524319 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.627721 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.627825 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.627851 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.627941 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.627961 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.731666 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.731748 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.731761 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.731781 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.731793 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.834399 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.834493 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.834517 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.834547 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.834570 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.937920 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.938000 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.938023 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.938052 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:14 crc kubenswrapper[4757]: I1006 13:39:14.938074 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:14Z","lastTransitionTime":"2025-10-06T13:39:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.040901 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.040967 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.040989 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.041011 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.041032 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.144043 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.144150 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.144169 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.144195 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.144213 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.179917 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:15 crc kubenswrapper[4757]: E1006 13:39:15.180087 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.246807 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.246845 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.246855 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.246870 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.246881 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.349882 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.349937 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.349948 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.349968 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.349983 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.452973 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.453035 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.453048 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.453066 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.453079 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.584343 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.584391 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.584404 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.584419 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.584431 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.686971 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.687033 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.687050 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.687075 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.687116 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.790168 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.790221 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.790237 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.790259 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.790276 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.893697 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.893754 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.893771 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.893796 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.893815 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.997670 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.997722 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.997737 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.997761 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:15 crc kubenswrapper[4757]: I1006 13:39:15.997778 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:15Z","lastTransitionTime":"2025-10-06T13:39:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.100945 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.100988 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.101000 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.101019 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.101029 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:16Z","lastTransitionTime":"2025-10-06T13:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.180037 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.180691 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:16 crc kubenswrapper[4757]: E1006 13:39:16.181693 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.181293 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:16 crc kubenswrapper[4757]: E1006 13:39:16.182217 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:16 crc kubenswrapper[4757]: E1006 13:39:16.180994 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.204048 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.204154 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.204174 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.204198 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.204217 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:16Z","lastTransitionTime":"2025-10-06T13:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.307286 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.307367 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.307383 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.307409 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.307426 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:16Z","lastTransitionTime":"2025-10-06T13:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.410824 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.410872 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.410889 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.410912 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.410929 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:16Z","lastTransitionTime":"2025-10-06T13:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.514001 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.514063 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.514080 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.514131 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.514149 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:16Z","lastTransitionTime":"2025-10-06T13:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.618870 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.618932 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.618949 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.618980 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.618998 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:16Z","lastTransitionTime":"2025-10-06T13:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.725593 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.725697 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.725726 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.725760 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.725796 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:16Z","lastTransitionTime":"2025-10-06T13:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.829823 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.829903 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.829926 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.829951 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.829969 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:16Z","lastTransitionTime":"2025-10-06T13:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.933775 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.933838 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.933857 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.933883 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:16 crc kubenswrapper[4757]: I1006 13:39:16.933901 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:16Z","lastTransitionTime":"2025-10-06T13:39:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.037455 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.037509 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.037526 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.037549 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.037567 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.141775 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.141828 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.141848 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.141873 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.141890 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.179476 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:17 crc kubenswrapper[4757]: E1006 13:39:17.179709 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.244595 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.244669 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.244693 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.244723 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.244744 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.348556 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.348634 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.348672 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.348708 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.348731 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.451258 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.451605 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.451670 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.451742 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.451805 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.554851 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.555282 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.555458 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.555624 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.555765 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.659161 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.659227 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.659248 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.659271 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.659288 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.762410 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.762460 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.762475 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.762494 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.762512 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.865597 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.865664 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.865689 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.865720 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.865741 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.968996 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.969034 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.969044 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.969059 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:17 crc kubenswrapper[4757]: I1006 13:39:17.969071 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:17Z","lastTransitionTime":"2025-10-06T13:39:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.071514 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.071546 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.071557 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.071569 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.071578 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.174049 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.174166 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.174192 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.174224 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.174247 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.179859 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.179948 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:18 crc kubenswrapper[4757]: E1006 13:39:18.180043 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:18 crc kubenswrapper[4757]: E1006 13:39:18.180202 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.180415 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:18 crc kubenswrapper[4757]: E1006 13:39:18.180664 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.276611 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.276672 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.276691 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.276715 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.276733 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.378793 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.378841 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.378853 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.378869 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.378883 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.482574 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.482611 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.482620 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.482633 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.482642 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.585805 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.585878 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.585902 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.585938 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.585962 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.689444 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.689507 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.689545 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.689577 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.689600 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.792636 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.792708 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.792730 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.792761 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.792796 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.895516 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.895603 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.895631 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.895659 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.895678 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.967520 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.967560 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.967568 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.967582 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.967594 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:18 crc kubenswrapper[4757]: E1006 13:39:18.991763 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:18Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.998293 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.998530 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.998740 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.998958 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:18 crc kubenswrapper[4757]: I1006 13:39:18.999169 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:18Z","lastTransitionTime":"2025-10-06T13:39:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: E1006 13:39:19.020041 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.025164 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.025416 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.025651 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.025901 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.026163 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: E1006 13:39:19.049242 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.054986 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.055266 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.055414 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.055597 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.055770 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: E1006 13:39:19.075720 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.079717 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.079779 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.079793 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.079812 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.079823 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: E1006 13:39:19.098342 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: E1006 13:39:19.098495 4757 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.100767 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.100806 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.100822 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.100845 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.100862 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.179734 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:19 crc kubenswrapper[4757]: E1006 13:39:19.179925 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.181216 4757 scope.go:117] "RemoveContainer" containerID="81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.206763 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.206843 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.206877 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.206911 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.206935 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.310405 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.310647 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.310714 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.310778 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.310853 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.413755 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.413803 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.413813 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.413834 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.413847 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.509743 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/1.log" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.514663 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211"} Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.516392 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.516420 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.516433 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.516447 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.516459 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.516514 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.533327 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.547125 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.561632 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.581026 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.602538 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d58
89f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.619802 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.619833 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.619845 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.619862 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.619874 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.623561 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.638585 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.672729 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba01
05c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.696551 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.711727 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.722795 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.722839 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.722850 4757 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.722867 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.722878 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.729596 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.742833 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.757840 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.772964 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.800479 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c13
1c7fd3301562f96387c8a211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:04Z\\\",\\\"message\\\":\\\"\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1006 13:39:04.366323 6196 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1006 13:39:04.366339 6196 services_controller.go:360] Finished syncing service console on namespace openshift-console for network=default : 2.436929ms\\\\nI1006 13:39:04.366598 6196 obj_retry.go:551] Creating *factory.egressNode crc took: 4.572966ms\\\\nI1006 13:39:04.366619 6196 factory.go:1336] Added *v1.Node event handler 7\\\\nI1006 13:39:04.366642 6196 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1006 13:39:04.366868 6196 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1006 13:39:04.366989 6196 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1006 13:39:04.367026 6196 ovnkube.go:599] Stopped ovnkube\\\\nI1006 13:39:04.367053 6196 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1006 13:39:04.367182 6196 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annotatio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.813693 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:19Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.825990 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.826035 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.826049 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.826067 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.826081 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.929418 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.929474 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.929483 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.929506 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:19 crc kubenswrapper[4757]: I1006 13:39:19.929524 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:19Z","lastTransitionTime":"2025-10-06T13:39:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.032912 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.032960 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.032970 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.032989 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.033000 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.136346 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.136390 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.136398 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.136415 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.136425 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.179259 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.179338 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.179406 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:20 crc kubenswrapper[4757]: E1006 13:39:20.179428 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:20 crc kubenswrapper[4757]: E1006 13:39:20.179679 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:20 crc kubenswrapper[4757]: E1006 13:39:20.179797 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.239430 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.239502 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.239528 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.239556 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.239576 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.342903 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.342974 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.342998 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.343028 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.343051 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.446182 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.446236 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.446245 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.446265 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.446278 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.521211 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/2.log" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.522755 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/1.log" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.527164 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211" exitCode=1 Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.527205 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.527241 4757 scope.go:117] "RemoveContainer" containerID="81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.528709 4757 scope.go:117] "RemoveContainer" containerID="737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211" Oct 06 13:39:20 crc kubenswrapper[4757]: E1006 13:39:20.529352 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.550199 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.550248 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.550261 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.550278 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.550291 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.551014 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.572915 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.592347 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.612920 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.643621 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c13
1c7fd3301562f96387c8a211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81720dfd043047aefa11096d2632cf7b232bb217cf2f674170f00aaeeb811f64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:04Z\\\",\\\"message\\\":\\\"\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1006 13:39:04.366323 6196 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console/console\\\\\\\"}\\\\nI1006 13:39:04.366339 6196 services_controller.go:360] Finished syncing service console on namespace openshift-console for network=default : 2.436929ms\\\\nI1006 13:39:04.366598 6196 obj_retry.go:551] Creating *factory.egressNode crc took: 4.572966ms\\\\nI1006 13:39:04.366619 6196 factory.go:1336] Added *v1.Node event handler 7\\\\nI1006 13:39:04.366642 6196 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI1006 13:39:04.366868 6196 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1006 13:39:04.366989 6196 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1006 13:39:04.367026 6196 ovnkube.go:599] Stopped ovnkube\\\\nI1006 13:39:04.367053 6196 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1006 13:39:04.367182 6196 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotatio\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:03Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 
8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.653369 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.653412 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.653425 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.653441 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.653455 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.660803 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.681123 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.699286 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.716663 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.731320 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.746432 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.756489 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.756538 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.756553 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.756572 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.756585 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.763060 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:
38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.782427 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.797684 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.816601 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster
-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.834404 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:20Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.859819 4757 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.859881 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.859905 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.859935 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.859958 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.963250 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.963327 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.963340 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.963362 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:20 crc kubenswrapper[4757]: I1006 13:39:20.963377 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:20Z","lastTransitionTime":"2025-10-06T13:39:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.066696 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.066766 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.066784 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.066810 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.066828 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:21Z","lastTransitionTime":"2025-10-06T13:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.170185 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.170253 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.170270 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.170296 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.170310 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:21Z","lastTransitionTime":"2025-10-06T13:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.179535 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:21 crc kubenswrapper[4757]: E1006 13:39:21.179706 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.273302 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.273382 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.273401 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.273424 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.273444 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:21Z","lastTransitionTime":"2025-10-06T13:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.376583 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.376637 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.376653 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.376675 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.376691 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:21Z","lastTransitionTime":"2025-10-06T13:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.479632 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.479684 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.479695 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.479711 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.479725 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:21Z","lastTransitionTime":"2025-10-06T13:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.533571 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/2.log" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.537649 4757 scope.go:117] "RemoveContainer" containerID="737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211" Oct 06 13:39:21 crc kubenswrapper[4757]: E1006 13:39:21.537839 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.550930 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.568602 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/
var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.582743 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.582807 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.582828 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.582853 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.582871 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:21Z","lastTransitionTime":"2025-10-06T13:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.592391 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.612021 4757 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.629805 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.647625 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.667439 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.686050 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.686115 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.686126 4757 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.686146 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.686158 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:21Z","lastTransitionTime":"2025-10-06T13:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.686348 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.704815 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.724377 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.745301 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.768746 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c13
1c7fd3301562f96387c8a211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.787541 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.790961 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.791303 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.792072 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.792337 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.792518 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:21Z","lastTransitionTime":"2025-10-06T13:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.808612 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.826790 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.845693 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.901817 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.901893 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.901916 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.901948 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.901972 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:21Z","lastTransitionTime":"2025-10-06T13:39:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.909256 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.922434 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.926064 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.944480 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.959901 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.975042 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d
69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:21 crc kubenswrapper[4757]: I1006 13:39:21.995251 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni
-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:21Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.004360 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.004445 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.004470 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.004500 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.004523 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.014723 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.028272 4757 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.042458 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.055370 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:22 crc kubenswrapper[4757]: E1006 13:39:22.055575 4757 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:22 crc kubenswrapper[4757]: E1006 13:39:22.055623 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs podName:8a0a24d2-8946-4710-91f2-cc59ecedb5e3 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:38.055608298 +0000 UTC m=+66.552926845 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs") pod "network-metrics-daemon-sc9qx" (UID: "8a0a24d2-8946-4710-91f2-cc59ecedb5e3") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.058538 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.071569 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.086581 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.099170 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.107867 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.107904 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.107915 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.107930 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.107941 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.114034 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc 
kubenswrapper[4757]: I1006 13:39:22.129290 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.155410 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.171954 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.179425 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.179543 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.179446 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:22 crc kubenswrapper[4757]: E1006 13:39:22.179670 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:22 crc kubenswrapper[4757]: E1006 13:39:22.179878 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:22 crc kubenswrapper[4757]: E1006 13:39:22.180178 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.201727 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f89
45c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.211938 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.211995 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.212014 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.212038 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.212056 4757 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.225621 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.245787 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.261507 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c409c81-c170-4308-98b3-fb1cc65ff1bb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.277340 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.292902 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.312977 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d58
89f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.314195 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.314350 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.314470 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.314587 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.314720 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.325456 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.338019 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.351963 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.364316 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.384801 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.396621 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.409450 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.417881 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.418009 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.418123 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.418279 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.418396 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.431218 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.460167 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.475497 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:22Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.521914 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.522178 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.522279 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.522390 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.522510 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.626391 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.626448 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.626488 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.626524 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.626546 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.730016 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.730131 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.730157 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.730187 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.730208 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.833319 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.833404 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.833433 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.833462 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.833481 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.936969 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.937058 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.937137 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.937169 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:22 crc kubenswrapper[4757]: I1006 13:39:22.937193 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:22Z","lastTransitionTime":"2025-10-06T13:39:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.039884 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.039958 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.039980 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.040003 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.040019 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.143166 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.143242 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.143265 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.143295 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.143322 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.179411 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:23 crc kubenswrapper[4757]: E1006 13:39:23.179596 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.246626 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.246695 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.246719 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.246749 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.246772 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.348947 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.348998 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.349014 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.349032 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.349046 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.451480 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.451822 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.451883 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.451944 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.452011 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.555002 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.555073 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.555126 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.555154 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.555180 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.657701 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.657751 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.657765 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.657783 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.657796 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.760617 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.760674 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.760696 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.760722 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.760740 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.864428 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.864473 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.864489 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.864510 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.864527 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.967749 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.967817 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.967829 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.967851 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.967864 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:23Z","lastTransitionTime":"2025-10-06T13:39:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.976275 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:39:23 crc kubenswrapper[4757]: E1006 13:39:23.976488 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:39:55.976457217 +0000 UTC m=+84.473775764 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.976568 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:23 crc kubenswrapper[4757]: I1006 13:39:23.976639 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:23 crc kubenswrapper[4757]: E1006 13:39:23.976773 4757 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:39:23 crc kubenswrapper[4757]: E1006 13:39:23.976839 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:55.976822872 +0000 UTC m=+84.474141429 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 06 13:39:23 crc kubenswrapper[4757]: E1006 13:39:23.976779 4757 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:39:23 crc kubenswrapper[4757]: E1006 13:39:23.976897 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:55.976884564 +0000 UTC m=+84.474203111 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.070392 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.070440 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.070456 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.070481 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.070497 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:24Z","lastTransitionTime":"2025-10-06T13:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.077990 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.078158 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.078319 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.078359 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.078383 4757 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.078459 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:56.078431038 +0000 UTC m=+84.575749615 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.082470 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.082703 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.083044 4757 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.083464 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-06 13:39:56.083427481 +0000 UTC m=+84.580746058 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.174242 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.174306 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.174325 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.174351 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.174373 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:24Z","lastTransitionTime":"2025-10-06T13:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.180473 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.180670 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.181231 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.181311 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.181646 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:24 crc kubenswrapper[4757]: E1006 13:39:24.181713 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.277504 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.277575 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.277585 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.277599 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.277611 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:24Z","lastTransitionTime":"2025-10-06T13:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.380674 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.380717 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.380729 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.380747 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.380760 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:24Z","lastTransitionTime":"2025-10-06T13:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.483265 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.483345 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.483375 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.483406 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.483432 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:24Z","lastTransitionTime":"2025-10-06T13:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.586021 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.586084 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.586136 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.586171 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.586200 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:24Z","lastTransitionTime":"2025-10-06T13:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.689529 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.689598 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.689617 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.689639 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.689658 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:24Z","lastTransitionTime":"2025-10-06T13:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.792306 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.792394 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.792421 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.792450 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.792472 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:24Z","lastTransitionTime":"2025-10-06T13:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.895578 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.895660 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.895685 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.895710 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:24 crc kubenswrapper[4757]: I1006 13:39:24.895728 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:24Z","lastTransitionTime":"2025-10-06T13:39:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 06 13:39:25 crc kubenswrapper[4757]: I1006 13:39:25.179460 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:25 crc kubenswrapper[4757]: E1006 13:39:25.179646 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:39:26 crc kubenswrapper[4757]: I1006 13:39:26.179604 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:26 crc kubenswrapper[4757]: I1006 13:39:26.179724 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:26 crc kubenswrapper[4757]: E1006 13:39:26.179802 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:39:26 crc kubenswrapper[4757]: I1006 13:39:26.179833 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:26 crc kubenswrapper[4757]: E1006 13:39:26.179996 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:39:26 crc kubenswrapper[4757]: E1006 13:39:26.180262 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
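The pattern in these records is that pods needing a brand-new sandbox are skipped while the runtime network is unready; host-network pods are unaffected, which is why only these three pods keep appearing. A toy model of that gate, under the stated assumption that this is how pod_workers behaves here, is sketched below; it is not kubelet source.

package main

import "fmt"

type pod struct {
	name        string
	hostNetwork bool
	hasSandbox  bool
}

// syncAllowed mirrors the gate implied by the log: a pod that needs a new
// sandbox and does not use host networking must wait for the network plugin.
func syncAllowed(networkReady bool, p pod) error {
	if !p.hasSandbox && !p.hostNetwork && !networkReady {
		return fmt.Errorf("network is not ready: container runtime network not ready")
	}
	return nil
}

func main() {
	pods := []pod{
		{name: "openshift-network-diagnostics/network-check-target-xd92c"},
		{name: "openshift-multus/network-metrics-daemon-sc9qx"},
		{name: "openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"},
	}
	for _, p := range pods {
		if err := syncAllowed(false, p); err != nil {
			fmt.Printf("Error syncing pod, skipping: err=%v pod=%q\n", err, p.name)
		}
	}
}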
Oct 06 13:39:27 crc kubenswrapper[4757]: I1006 13:39:27.179499 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:27 crc kubenswrapper[4757]: E1006 13:39:27.179641 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:39:28 crc kubenswrapper[4757]: I1006 13:39:28.179712 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:28 crc kubenswrapper[4757]: E1006 13:39:28.179910 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:39:28 crc kubenswrapper[4757]: I1006 13:39:28.179920 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:28 crc kubenswrapper[4757]: E1006 13:39:28.180036 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:39:28 crc kubenswrapper[4757]: I1006 13:39:28.179726 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:28 crc kubenswrapper[4757]: E1006 13:39:28.180204 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.179940 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:29 crc kubenswrapper[4757]: E1006 13:39:29.180210 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.199878 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.199953 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.199980 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.200009 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.200032 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:29 crc kubenswrapper[4757]: E1006 13:39:29.220886 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:29Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.226483 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.226559 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.226572 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.226587 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.226596 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: E1006 13:39:29.244620 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:29Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.249639 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.249705 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.249714 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.249731 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.249742 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: E1006 13:39:29.261698 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:29Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.266825 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.266891 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.266906 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.266924 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.266936 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: E1006 13:39:29.284555 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:29Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.289788 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.289882 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.289910 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.289982 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.290010 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: E1006 13:39:29.310226 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:29Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:29 crc kubenswrapper[4757]: E1006 13:39:29.310778 4757 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.313816 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.313857 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.313866 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.313882 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.313894 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.415941 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.416013 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.416036 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.416065 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.416087 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.519191 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.519266 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.519293 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.519324 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.519343 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.622723 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.622787 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.622810 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.622842 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.622865 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.726441 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.726486 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.726498 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.726513 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.726525 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.829924 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.829980 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.829993 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.830013 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.830028 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.933795 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.933842 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.933853 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.933869 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:29 crc kubenswrapper[4757]: I1006 13:39:29.933880 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:29Z","lastTransitionTime":"2025-10-06T13:39:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.037412 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.037478 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.037498 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.037525 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.037543 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.141355 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.141469 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.141507 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.141539 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.141560 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.179419 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.179433 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.179461 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:30 crc kubenswrapper[4757]: E1006 13:39:30.179715 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:30 crc kubenswrapper[4757]: E1006 13:39:30.179874 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:30 crc kubenswrapper[4757]: E1006 13:39:30.180000 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.245841 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.245915 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.245941 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.245972 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.245995 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.348938 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.348972 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.348980 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.348992 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.349001 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.451779 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.451866 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.451891 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.451921 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.451946 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.554651 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.554708 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.554724 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.554745 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.554759 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.658243 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.658318 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.658334 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.658395 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.658415 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.761528 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.761605 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.761620 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.761640 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.761655 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.864332 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.864382 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.864401 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.864454 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.864473 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.967296 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.967338 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.967349 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.967364 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:30 crc kubenswrapper[4757]: I1006 13:39:30.967375 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:30Z","lastTransitionTime":"2025-10-06T13:39:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.069651 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.069686 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.069694 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.069710 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.069719 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.172397 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.172439 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.172448 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.172464 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.172474 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.179707 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:31 crc kubenswrapper[4757]: E1006 13:39:31.179907 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.275125 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.275176 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.275189 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.275205 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.275215 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.378112 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.378202 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.378215 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.378246 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.378268 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.481006 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.481083 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.481129 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.481156 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.481175 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.584182 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.584263 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.584284 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.584318 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.584351 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.687400 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.687463 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.687480 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.687501 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.687521 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.790772 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.790822 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.790857 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.790881 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.790898 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.894313 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.894381 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.894405 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.894435 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.894459 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.997759 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.997820 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.997842 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.997870 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:31 crc kubenswrapper[4757]: I1006 13:39:31.997890 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:31Z","lastTransitionTime":"2025-10-06T13:39:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.101182 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.101221 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.101232 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.101248 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.101258 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:32Z","lastTransitionTime":"2025-10-06T13:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.180357 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.180473 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:32 crc kubenswrapper[4757]: E1006 13:39:32.180531 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:32 crc kubenswrapper[4757]: E1006 13:39:32.180650 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.180722 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:32 crc kubenswrapper[4757]: E1006 13:39:32.180806 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.202689 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.207329 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.207382 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.207394 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.207413 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.207448 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:32Z","lastTransitionTime":"2025-10-06T13:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.217929 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.234812 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.244318 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.256906 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.267318 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.281842 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.298539 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.312707 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.312759 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.312777 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.312802 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.312819 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:32Z","lastTransitionTime":"2025-10-06T13:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.314486 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.333504 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.349427 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.364569 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.381614 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.399510 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c409c81-c170-4308-98b3-fb1cc65ff1bb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.415823 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.416847 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.416907 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.416933 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.416967 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.416991 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:32Z","lastTransitionTime":"2025-10-06T13:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.431187 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.446568 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:32Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.519868 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.519933 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.519951 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.519973 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.519990 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:32Z","lastTransitionTime":"2025-10-06T13:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.622665 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.622737 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.622754 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.622778 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.622797 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:32Z","lastTransitionTime":"2025-10-06T13:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.726320 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.726381 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.726399 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.726422 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.726439 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:32Z","lastTransitionTime":"2025-10-06T13:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.829550 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.829691 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.829711 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.829785 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.829806 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:32Z","lastTransitionTime":"2025-10-06T13:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.933187 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.933275 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.933343 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.933377 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:32 crc kubenswrapper[4757]: I1006 13:39:32.933400 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:32Z","lastTransitionTime":"2025-10-06T13:39:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.036220 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.036295 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.036318 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.036340 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.036356 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.139857 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.139937 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.139961 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.139993 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.140016 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.179058 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:33 crc kubenswrapper[4757]: E1006 13:39:33.179299 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.243342 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.243425 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.243448 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.243476 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.243499 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.346374 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.346404 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.346412 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.346424 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.346433 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.449804 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.449873 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.449895 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.449923 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.449961 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.552869 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.552940 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.552958 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.552983 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.553001 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.657393 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.657460 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.657483 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.657510 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.657531 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.761304 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.761362 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.761379 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.761401 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.761417 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.864643 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.864760 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.864788 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.864820 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.864843 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.968007 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.968072 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.968089 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.968140 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:33 crc kubenswrapper[4757]: I1006 13:39:33.968158 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:33Z","lastTransitionTime":"2025-10-06T13:39:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.071556 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.071622 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.071646 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.071674 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.071695 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.175262 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.175357 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.175379 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.175402 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.175421 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.179705 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.179760 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.179714 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:34 crc kubenswrapper[4757]: E1006 13:39:34.179903 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:34 crc kubenswrapper[4757]: E1006 13:39:34.179990 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:34 crc kubenswrapper[4757]: E1006 13:39:34.180185 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.278438 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.278492 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.278503 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.278524 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.278537 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.381553 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.381643 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.381654 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.381675 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.381688 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.484154 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.484205 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.484218 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.484239 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.484254 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.586052 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.586145 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.586159 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.586182 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.586196 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.688800 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.688844 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.688852 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.688867 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.688878 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.792017 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.792072 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.792083 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.792114 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.792126 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.895259 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.895347 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.895359 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.895376 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.895389 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.998037 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.998118 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.998133 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.998149 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:34 crc kubenswrapper[4757]: I1006 13:39:34.998162 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:34Z","lastTransitionTime":"2025-10-06T13:39:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.100689 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.100789 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.100803 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.100819 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.100831 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:35Z","lastTransitionTime":"2025-10-06T13:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.179930 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:35 crc kubenswrapper[4757]: E1006 13:39:35.180293 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.180458 4757 scope.go:117] "RemoveContainer" containerID="737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211" Oct 06 13:39:35 crc kubenswrapper[4757]: E1006 13:39:35.180701 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.203696 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.203772 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.203792 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.203817 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.203836 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:35Z","lastTransitionTime":"2025-10-06T13:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.306254 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.306338 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.306355 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.306380 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.306398 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:35Z","lastTransitionTime":"2025-10-06T13:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.409868 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.409939 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.409957 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.409980 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.409998 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:35Z","lastTransitionTime":"2025-10-06T13:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.512009 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.512068 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.512116 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.512133 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.512143 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:35Z","lastTransitionTime":"2025-10-06T13:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.614874 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.614921 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.614937 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.614958 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.614975 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:35Z","lastTransitionTime":"2025-10-06T13:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.718190 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.718246 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.718268 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.718295 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.718315 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:35Z","lastTransitionTime":"2025-10-06T13:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.821678 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.821817 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.821838 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.821920 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.822480 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:35Z","lastTransitionTime":"2025-10-06T13:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.924341 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.924437 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.924454 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.924475 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:35 crc kubenswrapper[4757]: I1006 13:39:35.924492 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:35Z","lastTransitionTime":"2025-10-06T13:39:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.026508 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.026539 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.026555 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.026573 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.026586 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:36Z","lastTransitionTime":"2025-10-06T13:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.129107 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.129149 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.129160 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.129175 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.129189 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:36Z","lastTransitionTime":"2025-10-06T13:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.180018 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.180058 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.180146 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:36 crc kubenswrapper[4757]: E1006 13:39:36.180228 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:36 crc kubenswrapper[4757]: E1006 13:39:36.180315 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:36 crc kubenswrapper[4757]: E1006 13:39:36.180386 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.231252 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.231309 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.231323 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.231341 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.231354 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:36Z","lastTransitionTime":"2025-10-06T13:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.334166 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.334220 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.334229 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.334249 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:36 crc kubenswrapper[4757]: I1006 13:39:36.334259 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:36Z","lastTransitionTime":"2025-10-06T13:39:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 06 13:39:37 crc kubenswrapper[4757]: I1006 13:39:37.053905 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:37 crc kubenswrapper[4757]: I1006 13:39:37.053977 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:37 crc kubenswrapper[4757]: I1006 13:39:37.053995 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:37 crc kubenswrapper[4757]: I1006 13:39:37.054026 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:37 crc kubenswrapper[4757]: I1006 13:39:37.054043 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:37Z","lastTransitionTime":"2025-10-06T13:39:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:37 crc kubenswrapper[4757]: I1006 13:39:37.179951 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:37 crc kubenswrapper[4757]: E1006 13:39:37.180237 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:39:38 crc kubenswrapper[4757]: I1006 13:39:38.083204 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:38 crc kubenswrapper[4757]: I1006 13:39:38.083263 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:38 crc kubenswrapper[4757]: I1006 13:39:38.083279 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:38 crc kubenswrapper[4757]: I1006 13:39:38.083303 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:38 crc kubenswrapper[4757]: I1006 13:39:38.083320 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:38Z","lastTransitionTime":"2025-10-06T13:39:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:38 crc kubenswrapper[4757]: I1006 13:39:38.135724 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:38 crc kubenswrapper[4757]: E1006 13:39:38.135919 4757 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 06 13:39:38 crc kubenswrapper[4757]: E1006 13:39:38.135997 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs podName:8a0a24d2-8946-4710-91f2-cc59ecedb5e3 nodeName:}" failed. No retries permitted until 2025-10-06 13:40:10.135974867 +0000 UTC m=+98.633293494 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs") pod "network-metrics-daemon-sc9qx" (UID: "8a0a24d2-8946-4710-91f2-cc59ecedb5e3") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 06 13:39:38 crc kubenswrapper[4757]: I1006 13:39:38.179216 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:38 crc kubenswrapper[4757]: I1006 13:39:38.179270 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:38 crc kubenswrapper[4757]: E1006 13:39:38.179445 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:39:38 crc kubenswrapper[4757]: I1006 13:39:38.179274 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:38 crc kubenswrapper[4757]: E1006 13:39:38.179622 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:39:38 crc kubenswrapper[4757]: E1006 13:39:38.179729 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
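The nestedpendingoperations entry shows the volume manager's retry backoff at work: after the metrics-certs secret lookup fails, the mount is not retried for 32s (until m=+98.6, i.e. 13:40:10). This is consistent with capped exponential backoff between failed attempts; a minimal sketch of that pattern follows (the 500ms initial delay, doubling factor, and 2m2s cap reflect kubelet's usual defaults, but treat them as assumptions here rather than values read from this log beyond the observed 32s):

```go
package main

import (
	"fmt"
	"time"
)

// Capped exponential backoff of the kind applied between failed
// MountVolume.SetUp attempts. Initial delay and cap are assumptions
// for illustration; the log only shows durationBeforeRetry reaching 32s.
const (
	initialDelay = 500 * time.Millisecond
	maxDelay     = 2*time.Minute + 2*time.Second
)

// nextDelay doubles the wait after each failure, clamped to maxDelay.
func nextDelay(d time.Duration) time.Duration {
	d *= 2
	if d > maxDelay {
		d = maxDelay
	}
	return d
}

func main() {
	d := initialDelay
	for attempt := 1; attempt <= 10; attempt++ {
		fmt.Printf("attempt %d: wait %v before retrying\n", attempt, d)
		d = nextDelay(d)
	}
	// Around the seventh failure the delay reaches the 32s seen in the
	// log ("No retries permitted until ... durationBeforeRetry 32s"),
	// then keeps doubling toward the cap.
}
```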
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.009656 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.009762 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.009775 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.009822 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.009850 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.179779 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:39 crc kubenswrapper[4757]: E1006 13:39:39.179967 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.499842 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.499890 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.499900 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.499914 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.499924 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:39 crc kubenswrapper[4757]: E1006 13:39:39.515772 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:39Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.524791 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.524833 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.524843 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.524857 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.524867 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:39 crc kubenswrapper[4757]: E1006 13:39:39.540869 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:39Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.545217 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.545257 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.545269 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.545287 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.545299 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:39 crc kubenswrapper[4757]: E1006 13:39:39.560570 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:39Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.564613 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.564692 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.564705 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.564724 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.564736 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:39 crc kubenswrapper[4757]: E1006 13:39:39.578547 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:39Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.582286 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.582342 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.582354 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.582379 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.582391 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:39 crc kubenswrapper[4757]: E1006 13:39:39.592965 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:39Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:39 crc kubenswrapper[4757]: E1006 13:39:39.593121 4757 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.594735 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.594767 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.594776 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.594791 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.594803 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.698072 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.698141 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.698158 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.698177 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.698194 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.801064 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.801128 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.801139 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.801155 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.801167 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.903776 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.903847 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.903871 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.903902 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:39 crc kubenswrapper[4757]: I1006 13:39:39.903924 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:39Z","lastTransitionTime":"2025-10-06T13:39:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.006219 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.006258 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.006266 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.006280 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.006291 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.108978 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.109025 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.109036 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.109056 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.109068 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.179806 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.179852 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.179897 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:40 crc kubenswrapper[4757]: E1006 13:39:40.180002 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:39:40 crc kubenswrapper[4757]: E1006 13:39:40.180141 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:39:40 crc kubenswrapper[4757]: E1006 13:39:40.180206 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.211310 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.211350 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.211359 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.211371 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.211380 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.313889 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.313932 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.313950 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.313969 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.313983 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.416176 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.416209 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.416221 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.416237 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.416250 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.518394 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.518434 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.518447 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.518461 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.518473 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.603950 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/0.log" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.604002 4757 generic.go:334] "Generic (PLEG): container finished" podID="9144d9fd-70d7-4a29-8e6b-c020c611980a" containerID="7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90" exitCode=1 Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.604038 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9qf7z" event={"ID":"9144d9fd-70d7-4a29-8e6b-c020c611980a","Type":"ContainerDied","Data":"7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90"} Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.604455 4757 scope.go:117] "RemoveContainer" containerID="7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.621149 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.621335 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.621373 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.621385 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.621403 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.621415 4757 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.643439 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.662309 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.673765 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.691659 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.707578 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.720952 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c409c81-c170-4308-98b3-fb1cc65ff1bb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.724339 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.724379 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.724389 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.724405 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.724415 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.735913 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.751763 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:40Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"2025-10-06T13:38:54+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54\\\\n2025-10-06T13:38:54+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54 to /host/opt/cni/bin/\\\\n2025-10-06T13:38:54Z [verbose] multus-daemon started\\\\n2025-10-06T13:38:54Z [verbose] Readiness Indicator file check\\\\n2025-10-06T13:39:39Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.765673 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.779603 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.794435 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.816230 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c13
1c7fd3301562f96387c8a211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.827131 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.827186 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.827197 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.827214 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.827225 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.831901 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.845186 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.855889 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.868927 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:40Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.930067 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.930139 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.930155 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.930184 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:40 crc kubenswrapper[4757]: I1006 13:39:40.930197 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:40Z","lastTransitionTime":"2025-10-06T13:39:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.032828 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.032871 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.032880 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.032893 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.032904 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.135676 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.135721 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.135732 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.135746 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.135758 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.179442 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:41 crc kubenswrapper[4757]: E1006 13:39:41.179567 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.239359 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.239409 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.239434 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.239463 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.239483 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.343025 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.344003 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.344165 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.344306 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.344427 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.448223 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.448260 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.448269 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.448283 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.448291 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.551339 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.551386 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.551394 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.551410 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.551420 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.608474 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/0.log" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.608740 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9qf7z" event={"ID":"9144d9fd-70d7-4a29-8e6b-c020c611980a","Type":"ContainerStarted","Data":"ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.623441 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.637216 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c409c81-c170-4308-98b3-fb1cc65ff1bb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.653694 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.654024 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.654158 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.654265 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.653992 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.654379 4757 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.671158 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"2025-10-06T13:38:54+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54\\\\n2025-10-06T13:38:54+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54 to /host/opt/cni/bin/\\\\n2025-10-06T13:38:54Z [verbose] multus-daemon started\\\\n2025-10-06T13:38:54Z [verbose] Readiness Indicator file check\\\\n2025-10-06T13:39:39Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.685669 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.697943 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.711266 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.724712 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.737820 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.751486 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.756732 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.756895 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.756995 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.757144 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.757267 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.762550 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.775681 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.787114 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.805425 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c13
1c7fd3301562f96387c8a211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.820144 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.833365 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.847142 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:41Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.859955 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.859985 4757 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.859993 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.860007 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.860017 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.963214 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.963256 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.963266 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.963281 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:41 crc kubenswrapper[4757]: I1006 13:39:41.963293 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:41Z","lastTransitionTime":"2025-10-06T13:39:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.065898 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.065928 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.065936 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.065948 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.065992 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.168259 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.168305 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.168317 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.168334 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.168349 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.179188 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.179259 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:42 crc kubenswrapper[4757]: E1006 13:39:42.179299 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:42 crc kubenswrapper[4757]: E1006 13:39:42.179405 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.179571 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:42 crc kubenswrapper[4757]: E1006 13:39:42.179638 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.191909 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.204492 4757 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.215933 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.225945 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.237591 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.249532 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.265271 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c13
1c7fd3301562f96387c8a211\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.270546 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.270718 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.270785 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.270871 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.270964 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.275319 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.286885 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.298593 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.310059 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.320975 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c409c81-c170-4308-98b3-fb1cc65ff1bb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.332204 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.343435 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"2025-10-06T13:38:54+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54\\\\n2025-10-06T13:38:54+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54 to /host/opt/cni/bin/\\\\n2025-10-06T13:38:54Z [verbose] multus-daemon started\\\\n2025-10-06T13:38:54Z [verbose] Readiness Indicator file check\\\\n2025-10-06T13:39:39Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.358490 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.368748 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.374870 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.374924 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.374939 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.374958 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.374972 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.380558 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:42Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.477894 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.477938 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.477950 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.477966 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.477978 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.581135 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.584034 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.584179 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.584255 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.584321 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.689851 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.689901 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.689919 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.689938 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.689949 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.794246 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.794309 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.794317 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.794369 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.794381 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.896847 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.896902 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.896913 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.896926 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.896936 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.999001 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.999081 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.999139 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.999172 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:42 crc kubenswrapper[4757]: I1006 13:39:42.999195 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:42Z","lastTransitionTime":"2025-10-06T13:39:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.101997 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.102368 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.102482 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.102599 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.102671 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:43Z","lastTransitionTime":"2025-10-06T13:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.179253 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:43 crc kubenswrapper[4757]: E1006 13:39:43.179485 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.205181 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.205224 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.205236 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.205251 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.205265 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:43Z","lastTransitionTime":"2025-10-06T13:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.307265 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.307595 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.307672 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.307752 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.307831 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:43Z","lastTransitionTime":"2025-10-06T13:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.410732 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.410775 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.410786 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.410802 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.410815 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:43Z","lastTransitionTime":"2025-10-06T13:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.512670 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.512719 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.512729 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.512742 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.512752 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:43Z","lastTransitionTime":"2025-10-06T13:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.615278 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.615339 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.615352 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.615373 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.615388 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:43Z","lastTransitionTime":"2025-10-06T13:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.717547 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.717830 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.717983 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.718076 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.718181 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:43Z","lastTransitionTime":"2025-10-06T13:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.821410 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.821459 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.821473 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.821495 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.821511 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:43Z","lastTransitionTime":"2025-10-06T13:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.923727 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.924066 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.924387 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.924658 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:43 crc kubenswrapper[4757]: I1006 13:39:43.924920 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:43Z","lastTransitionTime":"2025-10-06T13:39:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.027340 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.027645 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.027743 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.027825 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.027926 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:44Z","lastTransitionTime":"2025-10-06T13:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.130138 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.130199 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.130209 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.130225 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.130238 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:44Z","lastTransitionTime":"2025-10-06T13:39:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.179881 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.179970 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:44 crc kubenswrapper[4757]: E1006 13:39:44.180342 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:44 crc kubenswrapper[4757]: I1006 13:39:44.180438 4757 util.go:30] "No sandbox for pod can be found. 
Oct 06 13:39:44 crc kubenswrapper[4757]: E1006 13:39:44.180468 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:39:44 crc kubenswrapper[4757]: E1006 13:39:44.180857 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.179503 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:45 crc kubenswrapper[4757]: E1006 13:39:45.179702 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.268294 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.268361 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.268373 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.268392 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.268405 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:45Z","lastTransitionTime":"2025-10-06T13:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.371490 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.371540 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.371555 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.371578 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:45 crc kubenswrapper[4757]: I1006 13:39:45.371596 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:45Z","lastTransitionTime":"2025-10-06T13:39:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.179513 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.179606 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.179529 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:46 crc kubenswrapper[4757]: E1006 13:39:46.179767 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:39:46 crc kubenswrapper[4757]: E1006 13:39:46.179911 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:39:46 crc kubenswrapper[4757]: E1006 13:39:46.180088 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.205127 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.205185 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.205210 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.205238 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.205259 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:46Z","lastTransitionTime":"2025-10-06T13:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.308056 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.308115 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.308127 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.308144 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:46 crc kubenswrapper[4757]: I1006 13:39:46.308156 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:46Z","lastTransitionTime":"2025-10-06T13:39:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.179541 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:47 crc kubenswrapper[4757]: E1006 13:39:47.179732 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.238504 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.238582 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.238605 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.238639 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.238661 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:47Z","lastTransitionTime":"2025-10-06T13:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.342303 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.342344 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.342355 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.342381 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:47 crc kubenswrapper[4757]: I1006 13:39:47.342394 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:47Z","lastTransitionTime":"2025-10-06T13:39:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.179037 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.179144 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.179053 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:48 crc kubenswrapper[4757]: E1006 13:39:48.179262 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:48 crc kubenswrapper[4757]: E1006 13:39:48.179370 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:48 crc kubenswrapper[4757]: E1006 13:39:48.179483 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.272552 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.272636 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.272661 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.272689 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.272711 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:48Z","lastTransitionTime":"2025-10-06T13:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.375250 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.375315 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.375335 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.375360 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:48 crc kubenswrapper[4757]: I1006 13:39:48.375377 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:48Z","lastTransitionTime":"2025-10-06T13:39:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.096402 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.096451 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.096463 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.096483 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.096496 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.179851 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:49 crc kubenswrapper[4757]: E1006 13:39:49.180007 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.198894 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.198949 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.198971 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.199003 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.199025 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.302557 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.302615 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.302637 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.302665 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.302686 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.405711 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.405766 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.405783 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.405836 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.405854 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.508168 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.508233 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.508252 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.508273 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.508286 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.611546 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.611613 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.611630 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.611653 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.611670 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.714935 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.714995 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.715011 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.715033 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.715050 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.735585 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.735652 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.735674 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.735702 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.735720 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: E1006 13:39:49.762019 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:49Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.767523 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.767571 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.767581 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.767597 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.767607 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: E1006 13:39:49.782123 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:49Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.786206 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.786246 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.786258 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.786276 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.786290 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: E1006 13:39:49.805508 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:49Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.809782 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.809834 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.809851 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.809873 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.809890 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: E1006 13:39:49.828733 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:49Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.834187 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.834247 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.834265 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.834291 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.834309 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: E1006 13:39:49.854345 4757 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"e1ec82ec-57e6-47de-8235-c2486415aecd\\\",\\\"systemUUID\\\":\\\"77015af3-b2cf-40c4-8ed8-504c8efcff1f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:49Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:49 crc kubenswrapper[4757]: E1006 13:39:49.854592 4757 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.857071 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.857161 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.857178 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.857204 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.857228 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.960668 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.960708 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.960717 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.960732 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:49 crc kubenswrapper[4757]: I1006 13:39:49.960742 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:49Z","lastTransitionTime":"2025-10-06T13:39:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.064265 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.064314 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.064322 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.064336 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.064346 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.166542 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.166593 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.166605 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.166627 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.166639 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.179830 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:50 crc kubenswrapper[4757]: E1006 13:39:50.179963 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.180399 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.180529 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:50 crc kubenswrapper[4757]: E1006 13:39:50.180739 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.180805 4757 scope.go:117] "RemoveContainer" containerID="737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211" Oct 06 13:39:50 crc kubenswrapper[4757]: E1006 13:39:50.180978 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.269856 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.269932 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.269953 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.269983 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.270004 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.372583 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.372981 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.372994 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.373013 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.373025 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.476175 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.476215 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.476225 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.476241 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.476256 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.579060 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.579133 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.579147 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.579161 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.579172 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.639195 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/2.log" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.642339 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.642718 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.658926 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb906301
32db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.672343 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.680945 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.680977 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.680985 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.680999 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.681008 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.687131 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.701503 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c409c81-c170-4308-98b3-fb1cc65ff1bb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.719943 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.733458 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"2025-10-06T13:38:54+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54\\\\n2025-10-06T13:38:54+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54 to /host/opt/cni/bin/\\\\n2025-10-06T13:38:54Z [verbose] multus-daemon started\\\\n2025-10-06T13:38:54Z [verbose] Readiness 
Indicator file check\\\\n2025-10-06T13:39:39Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.751326 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.764218 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.776888 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.783553 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.783596 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.783607 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.783624 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.783634 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.791648 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.808366 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.823842 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.838035 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.855236 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.870162 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.886819 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.886874 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.886886 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.886906 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.886925 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.892954 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.906181 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:50Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.989674 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.989730 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.989746 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.989765 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:50 crc kubenswrapper[4757]: I1006 13:39:50.989777 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:50Z","lastTransitionTime":"2025-10-06T13:39:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.092664 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.092921 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.092934 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.092955 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.092970 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:51Z","lastTransitionTime":"2025-10-06T13:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.179749 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:51 crc kubenswrapper[4757]: E1006 13:39:51.179942 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.196747 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.196822 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.196840 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.196864 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.196883 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:51Z","lastTransitionTime":"2025-10-06T13:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.300212 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.300304 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.300600 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.301041 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.301330 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:51Z","lastTransitionTime":"2025-10-06T13:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.404965 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.405026 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.405043 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.405068 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.405085 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:51Z","lastTransitionTime":"2025-10-06T13:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.507903 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.507965 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.507980 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.508002 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.508039 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:51Z","lastTransitionTime":"2025-10-06T13:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.609873 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.609902 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.609912 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.609924 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.609933 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:51Z","lastTransitionTime":"2025-10-06T13:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.647664 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/3.log" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.648300 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/2.log" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.651962 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0" exitCode=1 Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.652027 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.652064 4757 scope.go:117] "RemoveContainer" containerID="737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.653320 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0" Oct 06 13:39:51 crc kubenswrapper[4757]: E1006 13:39:51.653624 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.669065 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.684006 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.701928 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.713407 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.713489 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.713507 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.713534 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.713652 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:51Z","lastTransitionTime":"2025-10-06T13:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.729377 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:50Z\\\",\\\"message\\\":\\\"e openshift-kube-apiserver-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.109\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1006 13:39:50.965729 6757 services_controller.go:444] Built service openshift-kube-apiserver-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1006 13:39:50.965740 6757 services_controller.go:445] Built service openshift-kube-apiserver-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1006 13:39:50.965689 6757 services_controller.go:454] Service openshift-machine-config-operator/machine-config-controller for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1006 13:39:50.965808 6757 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.749200 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.770069 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.787168 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.809911 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.822414 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.822453 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.822480 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.822495 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.822506 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:51Z","lastTransitionTime":"2025-10-06T13:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.825368 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.843582 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"2025-10-06T13:38:54+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54\\\\n2025-10-06T13:38:54+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54 to /host/opt/cni/bin/\\\\n2025-10-06T13:38:54Z [verbose] multus-daemon started\\\\n2025-10-06T13:38:54Z [verbose] Readiness Indicator file check\\\\n2025-10-06T13:39:39Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.858848 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.873368 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.886785 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.898453 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.910928 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c409c81-c170-4308-98b3-fb1cc65ff1bb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.922446 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.930714 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.930766 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.930779 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.930797 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.930810 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:51Z","lastTransitionTime":"2025-10-06T13:39:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:51 crc kubenswrapper[4757]: I1006 13:39:51.938442 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:51Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.032999 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.033063 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.033075 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.033105 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.033119 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.136372 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.136436 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.136445 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.136463 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.136473 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.179483 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:39:52 crc kubenswrapper[4757]: E1006 13:39:52.179669 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.179997 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.180136 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:39:52 crc kubenswrapper[4757]: E1006 13:39:52.180310 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:39:52 crc kubenswrapper[4757]: E1006 13:39:52.180446 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.205080 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.226948 4757 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.239429 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.239499 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.239526 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.239556 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.239581 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.245824 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.271166 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://737f180b936ea2af92cdc2a3c77290ee6ad30c131c7fd3301562f96387c8a211\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:20Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1006 13:39:20.135763 6389 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1006 13:39:20.135790 6389 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1006 13:39:20.135830 6389 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1006 13:39:20.135845 6389 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1006 13:39:20.135848 6389 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1006 13:39:20.135872 6389 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1006 13:39:20.135873 6389 handler.go:208] Removed *v1.Node event handler 2\\\\nI1006 13:39:20.135888 6389 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1006 13:39:20.135916 6389 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1006 13:39:20.135920 6389 handler.go:208] Removed *v1.Node event handler 7\\\\nI1006 13:39:20.135944 6389 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1006 13:39:20.135983 6389 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1006 13:39:20.135991 6389 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1006 13:39:20.135996 6389 factory.go:656] Stopping watch factory\\\\nI1006 13:39:20.136011 6389 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:50Z\\\",\\\"message\\\":\\\"e openshift-kube-apiserver-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.109\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1006 13:39:50.965729 6757 services_controller.go:444] Built service openshift-kube-apiserver-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1006 13:39:50.965740 6757 services_controller.go:445] Built service openshift-kube-apiserver-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1006 13:39:50.965689 6757 services_controller.go:454] Service openshift-machine-config-operator/machine-config-controller for network=default has 1 cluster-wide, 0 
per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1006 13:39:50.965808 6757 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.290345 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.303355 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.315683 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.327991 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.343216 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.343257 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.343266 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.343283 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.343293 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.343996 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.364177 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.383776 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.397410 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.410784 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 
13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.425731 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.442454 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c409c81-c170-4308-98b3-fb1cc65ff1bb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.446451 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.446497 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.446513 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.446532 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.446549 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.457630 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.476735 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"2025-10-06T13:38:54+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54\\\\n2025-10-06T13:38:54+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54 to /host/opt/cni/bin/\\\\n2025-10-06T13:38:54Z [verbose] multus-daemon started\\\\n2025-10-06T13:38:54Z [verbose] Readiness Indicator file check\\\\n2025-10-06T13:39:39Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.548967 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.549021 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.549032 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.549081 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.549124 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.656973 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.657068 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.657090 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.657148 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.657174 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.662248 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/3.log" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.669414 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0" Oct 06 13:39:52 crc kubenswrapper[4757]: E1006 13:39:52.669727 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.684273 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cd6138e-cd0c-4140-8173-eeb737e5b20a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e5392ae609235f0d0526e4d115125c20a75af131b89c07ae6bfa3b4bac84108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d06bc2afab6df22e2c8140de3e6288a3e5f4190b6bdb31a2e71170275e70cae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://056d1a4bdaee6e39a0ba0105c9b48d23cd37c37936951e8e2aa123f6de93463d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e08edbfb6418e26eb6c3ef0d229a623bc625e8e20c29d41a95adc8ac8dc2534\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.701637 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c409c81-c170-4308-98b3-fb1cc65ff1bb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d9249418a973cd8c582a5f126d293b35071887e197f98c0c911823fc76e7ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a66ed2ef9cbfedf37b5bc458002c81c7d608f3bb904f5e7480d9874cfe29ada\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69aec338eb12fbfcd83c476590f12c4a2b4aff507981052b00060900f88a26a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9fbe7f72f01daeac82347a11112c1138e744a97c0df2d111fef60c8be750ce39\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.715864 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0010c888-d5ad-4b2b-8309-1647fdf0dee3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a2028c8aaf6b1504a3b00edc15272958a5254f6e621734988e959fe0720f1ea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kvf5t\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7tb7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.736177 4757 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-9qf7z" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9144d9fd-70d7-4a29-8e6b-c020c611980a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:39Z\\\",\\\"message\\\":\\\"2025-10-06T13:38:54+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54\\\\n2025-10-06T13:38:54+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_adb47832-9446-4ad4-96e7-a6a131816e54 to /host/opt/cni/bin/\\\\n2025-10-06T13:38:54Z [verbose] multus-daemon started\\\\n2025-10-06T13:38:54Z [verbose] Readiness Indicator file check\\\\n2025-10-06T13:39:39Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mf4fb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-9qf7z\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.754574 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62d4cce6-0583-40a6-b7ea-2996d07b49b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://886c6f797531b702123d9931997b7b5e3ebcf9c14c566ff90e0382eae806f925\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a6cfbbedbad94b500e9bc7c5777459ffbf3db038e8a520a2c9c28d381955df55\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8c16d3a08acde90ccf1beb5e9ced373395af7436ddbc05c24f6489f6f49fc75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7471a1d670fcee421cb43e0a8b8a2b5e15c9d428659f8d63be54895493225242\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d5889f0b8d556cbb5791eb0d9d5475c4fa8beb0c5e9614081c8a7002353282a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0e664e1241d006c37ace1635ef93d30b3062960868874ab67b024d872e8b352\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4cbf946357ea3bc8763e1c1b16d8794976662c55043426132f1ece7c52dca82\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-brz4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rhrzr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.761488 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.761539 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc 
kubenswrapper[4757]: I1006 13:39:52.761553 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.761571 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.761584 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.769680 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j889t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a097c3c1-7cfd-4ba8-878d-40b971843e92\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cf3eee034e9bc6cc2729e4cd23569f535ea8d2058f1576f75baaa5cf8407f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29wtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j889t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 
06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.788288 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"34af6a7e-1458-4f7c-bc54-69fb80966b6b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53547fa1f6011c3003bed92bbd7200050777786dd1fe8b3979b8d9c695c7259d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3d629cc4ee4a614d547fe962c1367b1717f185765e1ccc325fd941f37e3d656\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:39:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5csnl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l2rjj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.804007 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b45680ebd17f15c56456a2c0eb5c7b58f6f42524be18df11dacbb3f4d904eac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6454ffe81c62fdfd6a5c0ee4aff48e55b1dd83b14b07a93f915fc3023f6bd255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 
13:39:52.823266 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.841072 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.857229 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-9j5jn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"de18b9fe-e396-469e-a6f6-d87ce91f3270\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e4b5ed23b64e4bbc9f76f8cdef77d7f608d8ba029b540b69f64faf266f843155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2cslg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:52Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-9j5jn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.865415 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.865480 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.865501 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.865525 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.865546 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.872145 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70151c959d98e2cb056e57e50b67c68b1c031beb375b357c8941bc9b81dc040f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.892676 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:52Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.924582 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6624d05-e024-49f2-bf87-33e7ea4fccbb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8
779c46f342b1e5af2e8a2bf0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-06T13:39:50Z\\\",\\\"message\\\":\\\"e openshift-kube-apiserver-operator/metrics LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.109\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI1006 13:39:50.965729 6757 services_controller.go:444] Built service openshift-kube-apiserver-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI1006 13:39:50.965740 6757 services_controller.go:445] Built service openshift-kube-apiserver-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1006 13:39:50.965689 6757 services_controller.go:454] Service openshift-machine-config-operator/machine-config-controller for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1006 13:39:50.965808 6757 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:39:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdf96\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:53Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-58bhb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.943796 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:06Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwpmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:39:06Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-sc9qx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.965058 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aab0f26-c4f7-40b2-892f-0c7d8586a1c1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:39:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4119269e9f663bc5c38f4fdc8a5b32f1c3d8446fed6fd36172c41207c9c287b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bef054ed2b7c27f200f7ab089158ba9946ca17b507c85c377504f539812fcf6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://beb90630132db1396d9009fe35d6d6b8ab6afe59d32f196cf7a10f3179103227\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://123bbe58d8664c8fad1b86c9c321228f7ffe7cdf14c44e1d9be7802e10a3f0d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://813c083e0e8ba3b397a4b2795de1eac82299d7f673832272138f20162b7f2db2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-06T13:38:46Z\\\",\\\"message\\\":\\\"W1006 13:38:35.655009 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1006 13:38:35.655588 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759757915 cert, and key in /tmp/serving-cert-2033919833/serving-signer.crt, /tmp/serving-cert-2033919833/serving-signer.key\\\\nI1006 13:38:35.906575 1 observer_polling.go:159] Starting file observer\\\\nW1006 13:38:35.908632 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1006 13:38:35.908812 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1006 13:38:35.911572 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2033919833/tls.crt::/tmp/serving-cert-2033919833/tls.key\\\\\\\"\\\\nF1006 13:38:46.393904 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18af66d22d5905beeec34c4d158a88aadb1a8076e423996cd42e4172a31426e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:35Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://ca584765a9838801f99c7fb1f7bbcc740e684033b1b19035ef95b72526c6eda1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-06T13:38:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-06T13:38:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-06T13:38:32Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.969127 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.969222 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.969248 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.969278 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.969300 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:52Z","lastTransitionTime":"2025-10-06T13:39:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:52 crc kubenswrapper[4757]: I1006 13:39:52.983058 4757 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-06T13:38:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d100743e6a2245c99a227c3380c1e9cbdfec0eff361d21977a3e313fa8f7bfc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-06T13:38:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-06T13:39:52Z is after 2025-08-24T17:21:41Z" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.071982 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.072030 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.072040 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.072057 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.072068 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:53Z","lastTransitionTime":"2025-10-06T13:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.175874 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.176386 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.176566 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.176693 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.176822 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:53Z","lastTransitionTime":"2025-10-06T13:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.179431 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:53 crc kubenswrapper[4757]: E1006 13:39:53.179645 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.279787 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.279882 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.279902 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.279935 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.279960 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:53Z","lastTransitionTime":"2025-10-06T13:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.383515 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.384169 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.384370 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.384570 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.384829 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:53Z","lastTransitionTime":"2025-10-06T13:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.487993 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.488052 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.488061 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.488087 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.488120 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:53Z","lastTransitionTime":"2025-10-06T13:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.591381 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.591437 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.591449 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.591467 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.591480 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:53Z","lastTransitionTime":"2025-10-06T13:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.694279 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.694321 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.694335 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.694350 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.694361 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:53Z","lastTransitionTime":"2025-10-06T13:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.797472 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.797534 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.797546 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.797563 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.797575 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:53Z","lastTransitionTime":"2025-10-06T13:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.901072 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.901424 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.901563 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.901659 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:53 crc kubenswrapper[4757]: I1006 13:39:53.901753 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:53Z","lastTransitionTime":"2025-10-06T13:39:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.005364 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.005698 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.005801 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.005887 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.005965 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.109506 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.109572 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.109590 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.109611 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.109626 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.179824 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.179822 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.180926 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:54 crc kubenswrapper[4757]: E1006 13:39:54.181204 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:39:54 crc kubenswrapper[4757]: E1006 13:39:54.181360 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:39:54 crc kubenswrapper[4757]: E1006 13:39:54.181490 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.215031 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.215085 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.215135 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.215164 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.215181 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.318135 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.318182 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.318196 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.318212 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.318223 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.420959 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.421028 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.421048 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.421078 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.421131 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.524150 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.524204 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.524221 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.524244 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.524261 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.627657 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.628003 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.628068 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.628202 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.628322 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.731661 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.732288 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.732328 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.732350 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.732383 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.835503 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.835557 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.835576 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.835601 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.835628 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.938754 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.938823 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.938838 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.938858 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:54 crc kubenswrapper[4757]: I1006 13:39:54.938869 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:54Z","lastTransitionTime":"2025-10-06T13:39:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.041496 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.041539 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.041547 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.041560 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.041571 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.145085 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.145141 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.145150 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.145165 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.145174 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.179967 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:55 crc kubenswrapper[4757]: E1006 13:39:55.180207 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.247277 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.247338 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.247356 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.247385 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.247408 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.349672 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.349979 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.350043 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.350127 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.350235 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.452915 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.453426 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.453598 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.453774 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.453933 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.557592 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.558013 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.558263 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.558425 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.558571 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.661425 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.661492 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.661513 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.661540 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.661563 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.764706 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.764768 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.764786 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.764810 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.764827 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.867910 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.867974 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.867991 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.868013 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.868030 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.970620 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.970690 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.970716 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.970747 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:55 crc kubenswrapper[4757]: I1006 13:39:55.970769 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:55Z","lastTransitionTime":"2025-10-06T13:39:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.036863 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.036965 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.037051 4757 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.037074 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.037034181 +0000 UTC m=+148.534352758 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.037170 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.037149236 +0000 UTC m=+148.534467803 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.037345 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.037485 4757 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.037539 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.037520562 +0000 UTC m=+148.534839189 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.074201 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.074256 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.074267 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.074288 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.074298 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:56Z","lastTransitionTime":"2025-10-06T13:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.138265 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.138490 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.138506 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.138548 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.138579 4757 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.138665 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.138684 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.138655292 +0000 UTC m=+148.635973869 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.138697 4757 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.138718 4757 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.138780 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.138758966 +0000 UTC m=+148.636077543 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.176716 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.176758 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.176769 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.176786 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.176797 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:56Z","lastTransitionTime":"2025-10-06T13:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.180027 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.180176 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.180340 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.180419 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.180534 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:56 crc kubenswrapper[4757]: E1006 13:39:56.180690 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.278930 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.278974 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.278987 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.279006 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.279019 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:56Z","lastTransitionTime":"2025-10-06T13:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.381540 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.381599 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.381617 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.381640 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.381659 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:56Z","lastTransitionTime":"2025-10-06T13:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.485669 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.485721 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.485732 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.485756 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.485771 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:56Z","lastTransitionTime":"2025-10-06T13:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.589295 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.589342 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.589354 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.589374 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.589387 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:56Z","lastTransitionTime":"2025-10-06T13:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.692604 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.693171 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.693186 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.693211 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.693225 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:56Z","lastTransitionTime":"2025-10-06T13:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.796564 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.796631 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.796656 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.796679 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.796695 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:56Z","lastTransitionTime":"2025-10-06T13:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.900136 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.900206 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.900220 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.900234 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:56 crc kubenswrapper[4757]: I1006 13:39:56.900244 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:56Z","lastTransitionTime":"2025-10-06T13:39:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.003497 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.003554 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.003565 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.003585 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.003596 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.106689 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.106756 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.106770 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.106790 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.106801 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.180178 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:39:57 crc kubenswrapper[4757]: E1006 13:39:57.180403 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.210655 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.210722 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.210739 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.210758 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.210768 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.314132 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.314226 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.314251 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.314280 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.314301 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.417995 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.418070 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.418082 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.418120 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.418133 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.520827 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.520888 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.520914 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.520942 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.520963 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.624759 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.624826 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.624848 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.624876 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.624900 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.727523 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.727596 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.727611 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.727627 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.727638 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.831018 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.831061 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.831072 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.831105 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.831119 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.934931 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.934972 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.934983 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.934998 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:57 crc kubenswrapper[4757]: I1006 13:39:57.935010 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:57Z","lastTransitionTime":"2025-10-06T13:39:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.038199 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.038257 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.038269 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.038289 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.038301 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.142603 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.142676 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.142701 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.142725 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.142742 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.179963 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.180140 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.180185 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:39:58 crc kubenswrapper[4757]: E1006 13:39:58.180265 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:39:58 crc kubenswrapper[4757]: E1006 13:39:58.180485 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:39:58 crc kubenswrapper[4757]: E1006 13:39:58.180632 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.245946 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.246001 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.246014 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.246032 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.246045 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.349066 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.349194 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.349215 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.349239 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.349260 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.452081 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.452146 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.452158 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.452175 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.452188 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.555520 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.555599 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.555610 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.555629 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.555643 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.659574 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.659654 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.659678 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.659766 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.659787 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.762816 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.762872 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.762889 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.762908 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.762926 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.865654 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.865686 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.865725 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.865739 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.865747 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.968776 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.968855 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.968867 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.968906 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 06 13:39:58 crc kubenswrapper[4757]: I1006 13:39:58.968919 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:58Z","lastTransitionTime":"2025-10-06T13:39:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.072178 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.072238 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.072253 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.072275 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.072288 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.174914 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.175030 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.175040 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.175055 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.175066 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.179251 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:39:59 crc kubenswrapper[4757]: E1006 13:39:59.179430 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.278042 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.278129 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.278144 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.278161 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.278175 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.380970 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.381428 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.381527 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.381662 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.381780 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.483772 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.483828 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.483841 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.483862 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.483875 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.586903 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.586967 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.586978 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.586993 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.587005 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.689252 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.689612 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.689721 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.689811 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.689904 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.792721 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.792782 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.792799 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.792824 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.792841 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.896218 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.897839 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.897998 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.898170 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.898299 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.927754 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.927830 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.927865 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.927886 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 06 13:39:59 crc kubenswrapper[4757]: I1006 13:39:59.927896 4757 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-06T13:39:59Z","lastTransitionTime":"2025-10-06T13:39:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.003791 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n"] Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.004653 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.006900 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.006932 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.007248 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.007393 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.039887 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=69.039870382 podStartE2EDuration="1m9.039870382s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.023104249 +0000 UTC m=+88.520422786" watchObservedRunningTime="2025-10-06 13:40:00.039870382 +0000 UTC m=+88.537188919" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.067360 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=39.06733969 podStartE2EDuration="39.06733969s" podCreationTimestamp="2025-10-06 13:39:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.054738674 +0000 UTC m=+88.552057221" watchObservedRunningTime="2025-10-06 13:40:00.06733969 +0000 UTC m=+88.564658227" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.086003 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podStartSLOduration=69.085982743 podStartE2EDuration="1m9.085982743s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.067598521 +0000 UTC m=+88.564917068" watchObservedRunningTime="2025-10-06 13:40:00.085982743 +0000 UTC m=+88.583301280" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.086655 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-service-ca\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.086683 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.086701 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.086735 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.086766 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.103943 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-9qf7z" podStartSLOduration=69.103921106 podStartE2EDuration="1m9.103921106s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.087055689 +0000 UTC m=+88.584374246" watchObservedRunningTime="2025-10-06 13:40:00.103921106 +0000 UTC m=+88.601239653" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.118142 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-rhrzr" podStartSLOduration=69.118121279 podStartE2EDuration="1m9.118121279s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.104539061 +0000 UTC m=+88.601857608" watchObservedRunningTime="2025-10-06 13:40:00.118121279 +0000 UTC m=+88.615439816" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.118287 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-j889t" podStartSLOduration=69.118283655 podStartE2EDuration="1m9.118283655s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.118253434 +0000 UTC m=+88.615571971" watchObservedRunningTime="2025-10-06 13:40:00.118283655 +0000 UTC m=+88.615602192" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.154611 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l2rjj" podStartSLOduration=68.15459584 podStartE2EDuration="1m8.15459584s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.132238489 +0000 UTC m=+88.629557026" watchObservedRunningTime="2025-10-06 13:40:00.15459584 +0000 UTC m=+88.651914377" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.168677 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=67.168658187 podStartE2EDuration="1m7.168658187s" podCreationTimestamp="2025-10-06 13:38:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.154733445 +0000 UTC m=+88.652051982" watchObservedRunningTime="2025-10-06 13:40:00.168658187 +0000 UTC m=+88.665976724" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.179124 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.179183 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:00 crc kubenswrapper[4757]: E1006 13:40:00.179246 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.179125 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:00 crc kubenswrapper[4757]: E1006 13:40:00.179370 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:00 crc kubenswrapper[4757]: E1006 13:40:00.179448 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.187744 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.187842 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-service-ca\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.187844 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.187869 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.187947 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.187996 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.188050 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.189084 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-service-ca\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" 
Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.195960 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n"
Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.204606 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7b68fe3c-4ac3-430f-8b60-26f83d3dfe95-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-2rz4n\" (UID: \"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n"
Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.222930 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-9j5jn" podStartSLOduration=69.222909294 podStartE2EDuration="1m9.222909294s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.222545638 +0000 UTC m=+88.719864185" watchObservedRunningTime="2025-10-06 13:40:00.222909294 +0000 UTC m=+88.720227841"
Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.328047 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n"
Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.698723 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" event={"ID":"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95","Type":"ContainerStarted","Data":"dd49f3a2c8dc89d42c94d4fc93ea10a8e959827d097ef372420c8880682ffc39"}
Oct 06 13:40:00 crc kubenswrapper[4757]: I1006 13:40:00.698808 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" event={"ID":"7b68fe3c-4ac3-430f-8b60-26f83d3dfe95","Type":"ContainerStarted","Data":"f6b558332043e78888dfb8128e143130db6b4bb667546800b92ecf427373adaf"}
Oct 06 13:40:01 crc kubenswrapper[4757]: I1006 13:40:01.179003 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:01 crc kubenswrapper[4757]: E1006 13:40:01.179210 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:02 crc kubenswrapper[4757]: I1006 13:40:02.179808 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:02 crc kubenswrapper[4757]: I1006 13:40:02.179808 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:02 crc kubenswrapper[4757]: I1006 13:40:02.179819 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:02 crc kubenswrapper[4757]: E1006 13:40:02.180939 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:02 crc kubenswrapper[4757]: E1006 13:40:02.181058 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:40:02 crc kubenswrapper[4757]: E1006 13:40:02.181223 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:03 crc kubenswrapper[4757]: I1006 13:40:03.180123 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:03 crc kubenswrapper[4757]: E1006 13:40:03.180340 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:04 crc kubenswrapper[4757]: I1006 13:40:04.179385 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:04 crc kubenswrapper[4757]: I1006 13:40:04.179507 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:04 crc kubenswrapper[4757]: E1006 13:40:04.180042 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:04 crc kubenswrapper[4757]: I1006 13:40:04.179553 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:04 crc kubenswrapper[4757]: E1006 13:40:04.180344 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:40:04 crc kubenswrapper[4757]: E1006 13:40:04.180527 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:05 crc kubenswrapper[4757]: I1006 13:40:05.179998 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:05 crc kubenswrapper[4757]: E1006 13:40:05.180163 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:06 crc kubenswrapper[4757]: I1006 13:40:06.179128 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:06 crc kubenswrapper[4757]: E1006 13:40:06.179273 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:40:06 crc kubenswrapper[4757]: I1006 13:40:06.179291 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:06 crc kubenswrapper[4757]: I1006 13:40:06.179146 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:06 crc kubenswrapper[4757]: I1006 13:40:06.180232 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"
Oct 06 13:40:06 crc kubenswrapper[4757]: E1006 13:40:06.180477 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb"
Oct 06 13:40:06 crc kubenswrapper[4757]: E1006 13:40:06.179866 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:06 crc kubenswrapper[4757]: E1006 13:40:06.180470 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:06 crc kubenswrapper[4757]: I1006 13:40:06.196200 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-2rz4n" podStartSLOduration=75.196182913 podStartE2EDuration="1m15.196182913s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:00.721171506 +0000 UTC m=+89.218490103" watchObservedRunningTime="2025-10-06 13:40:06.196182913 +0000 UTC m=+94.693501450"
Oct 06 13:40:06 crc kubenswrapper[4757]: I1006 13:40:06.196600 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Oct 06 13:40:07 crc kubenswrapper[4757]: I1006 13:40:07.179518 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:07 crc kubenswrapper[4757]: E1006 13:40:07.179661 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:08 crc kubenswrapper[4757]: I1006 13:40:08.179808 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:08 crc kubenswrapper[4757]: I1006 13:40:08.179895 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:08 crc kubenswrapper[4757]: E1006 13:40:08.181079 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:08 crc kubenswrapper[4757]: E1006 13:40:08.181279 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:08 crc kubenswrapper[4757]: I1006 13:40:08.180358 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:08 crc kubenswrapper[4757]: E1006 13:40:08.181785 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:40:09 crc kubenswrapper[4757]: I1006 13:40:09.179322 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:09 crc kubenswrapper[4757]: E1006 13:40:09.179569 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:10 crc kubenswrapper[4757]: I1006 13:40:10.179078 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:10 crc kubenswrapper[4757]: I1006 13:40:10.179142 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:10 crc kubenswrapper[4757]: E1006 13:40:10.179284 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:10 crc kubenswrapper[4757]: I1006 13:40:10.179347 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:10 crc kubenswrapper[4757]: E1006 13:40:10.179461 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:40:10 crc kubenswrapper[4757]: E1006 13:40:10.179557 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:10 crc kubenswrapper[4757]: I1006 13:40:10.203205 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:10 crc kubenswrapper[4757]: E1006 13:40:10.203435 4757 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 06 13:40:10 crc kubenswrapper[4757]: E1006 13:40:10.203558 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs podName:8a0a24d2-8946-4710-91f2-cc59ecedb5e3 nodeName:}" failed. No retries permitted until 2025-10-06 13:41:14.203532146 +0000 UTC m=+162.700850723 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs") pod "network-metrics-daemon-sc9qx" (UID: "8a0a24d2-8946-4710-91f2-cc59ecedb5e3") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 06 13:40:11 crc kubenswrapper[4757]: I1006 13:40:11.179522 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:11 crc kubenswrapper[4757]: E1006 13:40:11.179817 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:12 crc kubenswrapper[4757]: I1006 13:40:12.180057 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:12 crc kubenswrapper[4757]: I1006 13:40:12.180084 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:12 crc kubenswrapper[4757]: I1006 13:40:12.180217 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:12 crc kubenswrapper[4757]: E1006 13:40:12.180457 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:12 crc kubenswrapper[4757]: E1006 13:40:12.180737 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:12 crc kubenswrapper[4757]: E1006 13:40:12.181213 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:40:12 crc kubenswrapper[4757]: I1006 13:40:12.197283 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=6.197255734 podStartE2EDuration="6.197255734s" podCreationTimestamp="2025-10-06 13:40:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:12.195392586 +0000 UTC m=+100.692711203" watchObservedRunningTime="2025-10-06 13:40:12.197255734 +0000 UTC m=+100.694574301"
Oct 06 13:40:13 crc kubenswrapper[4757]: I1006 13:40:13.179347 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:13 crc kubenswrapper[4757]: E1006 13:40:13.179585 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:14 crc kubenswrapper[4757]: I1006 13:40:14.180203 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:14 crc kubenswrapper[4757]: I1006 13:40:14.180320 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:14 crc kubenswrapper[4757]: E1006 13:40:14.180444 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:14 crc kubenswrapper[4757]: E1006 13:40:14.180555 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:14 crc kubenswrapper[4757]: I1006 13:40:14.180259 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:14 crc kubenswrapper[4757]: E1006 13:40:14.181494 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:40:15 crc kubenswrapper[4757]: I1006 13:40:15.178989 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:15 crc kubenswrapper[4757]: E1006 13:40:15.179336 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:16 crc kubenswrapper[4757]: I1006 13:40:16.179363 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:16 crc kubenswrapper[4757]: I1006 13:40:16.179470 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:16 crc kubenswrapper[4757]: I1006 13:40:16.179518 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:16 crc kubenswrapper[4757]: E1006 13:40:16.179685 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:40:16 crc kubenswrapper[4757]: E1006 13:40:16.179836 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:16 crc kubenswrapper[4757]: E1006 13:40:16.179996 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:17 crc kubenswrapper[4757]: I1006 13:40:17.179323 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:17 crc kubenswrapper[4757]: E1006 13:40:17.179511 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:17 crc kubenswrapper[4757]: I1006 13:40:17.180725 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"
Oct 06 13:40:17 crc kubenswrapper[4757]: E1006 13:40:17.180930 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-58bhb_openshift-ovn-kubernetes(a6624d05-e024-49f2-bf87-33e7ea4fccbb)\"" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb"
Oct 06 13:40:18 crc kubenswrapper[4757]: I1006 13:40:18.179717 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:18 crc kubenswrapper[4757]: E1006 13:40:18.179867 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:18 crc kubenswrapper[4757]: I1006 13:40:18.179739 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:18 crc kubenswrapper[4757]: I1006 13:40:18.179718 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:18 crc kubenswrapper[4757]: E1006 13:40:18.179942 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:18 crc kubenswrapper[4757]: E1006 13:40:18.180011 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 06 13:40:19 crc kubenswrapper[4757]: I1006 13:40:19.179906 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:40:19 crc kubenswrapper[4757]: E1006 13:40:19.180077 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 06 13:40:19 crc kubenswrapper[4757]: I1006 13:40:19.202449 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Oct 06 13:40:20 crc kubenswrapper[4757]: I1006 13:40:20.179527 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:40:20 crc kubenswrapper[4757]: I1006 13:40:20.179562 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx"
Oct 06 13:40:20 crc kubenswrapper[4757]: I1006 13:40:20.179709 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:40:20 crc kubenswrapper[4757]: E1006 13:40:20.180515 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 06 13:40:20 crc kubenswrapper[4757]: E1006 13:40:20.180669 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3"
Oct 06 13:40:20 crc kubenswrapper[4757]: E1006 13:40:20.180769 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:21 crc kubenswrapper[4757]: I1006 13:40:21.179566 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:21 crc kubenswrapper[4757]: E1006 13:40:21.179944 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:22 crc kubenswrapper[4757]: I1006 13:40:22.179004 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:22 crc kubenswrapper[4757]: I1006 13:40:22.179132 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:22 crc kubenswrapper[4757]: E1006 13:40:22.179196 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:22 crc kubenswrapper[4757]: I1006 13:40:22.179402 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:22 crc kubenswrapper[4757]: E1006 13:40:22.181466 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:22 crc kubenswrapper[4757]: E1006 13:40:22.181551 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:22 crc kubenswrapper[4757]: I1006 13:40:22.212467 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=3.212405466 podStartE2EDuration="3.212405466s" podCreationTimestamp="2025-10-06 13:40:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:22.211851452 +0000 UTC m=+110.709170089" watchObservedRunningTime="2025-10-06 13:40:22.212405466 +0000 UTC m=+110.709724023" Oct 06 13:40:23 crc kubenswrapper[4757]: I1006 13:40:23.180059 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:23 crc kubenswrapper[4757]: E1006 13:40:23.180414 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:24 crc kubenswrapper[4757]: I1006 13:40:24.179748 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:24 crc kubenswrapper[4757]: I1006 13:40:24.179839 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:24 crc kubenswrapper[4757]: I1006 13:40:24.179942 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:24 crc kubenswrapper[4757]: E1006 13:40:24.179999 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:24 crc kubenswrapper[4757]: E1006 13:40:24.179856 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:24 crc kubenswrapper[4757]: E1006 13:40:24.180165 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:25 crc kubenswrapper[4757]: I1006 13:40:25.179871 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:25 crc kubenswrapper[4757]: E1006 13:40:25.180140 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:26 crc kubenswrapper[4757]: I1006 13:40:26.180878 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:26 crc kubenswrapper[4757]: E1006 13:40:26.181045 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:26 crc kubenswrapper[4757]: I1006 13:40:26.180949 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:26 crc kubenswrapper[4757]: E1006 13:40:26.181151 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:26 crc kubenswrapper[4757]: I1006 13:40:26.180935 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:26 crc kubenswrapper[4757]: E1006 13:40:26.181202 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:26 crc kubenswrapper[4757]: I1006 13:40:26.789762 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/1.log" Oct 06 13:40:26 crc kubenswrapper[4757]: I1006 13:40:26.790209 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/0.log" Oct 06 13:40:26 crc kubenswrapper[4757]: I1006 13:40:26.790260 4757 generic.go:334] "Generic (PLEG): container finished" podID="9144d9fd-70d7-4a29-8e6b-c020c611980a" containerID="ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9" exitCode=1 Oct 06 13:40:26 crc kubenswrapper[4757]: I1006 13:40:26.790301 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9qf7z" event={"ID":"9144d9fd-70d7-4a29-8e6b-c020c611980a","Type":"ContainerDied","Data":"ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9"} Oct 06 13:40:26 crc kubenswrapper[4757]: I1006 13:40:26.790347 4757 scope.go:117] "RemoveContainer" containerID="7771512cbe14fa9b8d61be3a354a5fb8436b84ddf455ae4426440f1599f7ad90" Oct 06 13:40:26 crc kubenswrapper[4757]: I1006 13:40:26.790867 4757 scope.go:117] "RemoveContainer" containerID="ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9" Oct 06 13:40:26 crc kubenswrapper[4757]: E1006 13:40:26.791072 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-9qf7z_openshift-multus(9144d9fd-70d7-4a29-8e6b-c020c611980a)\"" pod="openshift-multus/multus-9qf7z" podUID="9144d9fd-70d7-4a29-8e6b-c020c611980a" Oct 06 13:40:27 crc kubenswrapper[4757]: I1006 13:40:27.179270 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:27 crc kubenswrapper[4757]: E1006 13:40:27.179581 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:27 crc kubenswrapper[4757]: I1006 13:40:27.795623 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/1.log" Oct 06 13:40:28 crc kubenswrapper[4757]: I1006 13:40:28.179619 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:28 crc kubenswrapper[4757]: I1006 13:40:28.179672 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:28 crc kubenswrapper[4757]: E1006 13:40:28.179987 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:28 crc kubenswrapper[4757]: E1006 13:40:28.180183 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:28 crc kubenswrapper[4757]: I1006 13:40:28.179619 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:28 crc kubenswrapper[4757]: E1006 13:40:28.180294 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:29 crc kubenswrapper[4757]: I1006 13:40:29.179712 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:29 crc kubenswrapper[4757]: E1006 13:40:29.179910 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:30 crc kubenswrapper[4757]: I1006 13:40:30.180146 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:30 crc kubenswrapper[4757]: I1006 13:40:30.180157 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:30 crc kubenswrapper[4757]: E1006 13:40:30.180329 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:30 crc kubenswrapper[4757]: I1006 13:40:30.180465 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:30 crc kubenswrapper[4757]: E1006 13:40:30.180683 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:30 crc kubenswrapper[4757]: E1006 13:40:30.180804 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:31 crc kubenswrapper[4757]: I1006 13:40:31.180167 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:31 crc kubenswrapper[4757]: E1006 13:40:31.180838 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:31 crc kubenswrapper[4757]: I1006 13:40:31.181335 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0" Oct 06 13:40:31 crc kubenswrapper[4757]: I1006 13:40:31.811453 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/3.log" Oct 06 13:40:31 crc kubenswrapper[4757]: I1006 13:40:31.814776 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerStarted","Data":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"} Oct 06 13:40:31 crc kubenswrapper[4757]: I1006 13:40:31.815220 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" Oct 06 13:40:31 crc kubenswrapper[4757]: I1006 13:40:31.852639 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podStartSLOduration=99.852623854 podStartE2EDuration="1m39.852623854s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:31.851139682 +0000 UTC m=+120.348458219" watchObservedRunningTime="2025-10-06 13:40:31.852623854 +0000 UTC m=+120.349942391" Oct 06 13:40:32 crc kubenswrapper[4757]: I1006 13:40:32.076032 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-sc9qx"] Oct 06 13:40:32 crc kubenswrapper[4757]: I1006 13:40:32.076298 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:32 crc kubenswrapper[4757]: E1006 13:40:32.076488 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:32 crc kubenswrapper[4757]: E1006 13:40:32.143641 4757 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 06 13:40:32 crc kubenswrapper[4757]: I1006 13:40:32.179033 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:32 crc kubenswrapper[4757]: I1006 13:40:32.179069 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:32 crc kubenswrapper[4757]: E1006 13:40:32.180914 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:32 crc kubenswrapper[4757]: E1006 13:40:32.181118 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:32 crc kubenswrapper[4757]: E1006 13:40:32.299233 4757 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 06 13:40:33 crc kubenswrapper[4757]: I1006 13:40:33.178997 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:33 crc kubenswrapper[4757]: E1006 13:40:33.179605 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:34 crc kubenswrapper[4757]: I1006 13:40:34.179450 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:34 crc kubenswrapper[4757]: I1006 13:40:34.179498 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:34 crc kubenswrapper[4757]: I1006 13:40:34.179473 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:34 crc kubenswrapper[4757]: E1006 13:40:34.179641 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:34 crc kubenswrapper[4757]: E1006 13:40:34.179782 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:34 crc kubenswrapper[4757]: E1006 13:40:34.179914 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:35 crc kubenswrapper[4757]: I1006 13:40:35.179731 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:35 crc kubenswrapper[4757]: E1006 13:40:35.179929 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:36 crc kubenswrapper[4757]: I1006 13:40:36.179561 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:36 crc kubenswrapper[4757]: I1006 13:40:36.179616 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:36 crc kubenswrapper[4757]: E1006 13:40:36.179803 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:36 crc kubenswrapper[4757]: I1006 13:40:36.180088 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:36 crc kubenswrapper[4757]: E1006 13:40:36.180235 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:36 crc kubenswrapper[4757]: E1006 13:40:36.180432 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:37 crc kubenswrapper[4757]: I1006 13:40:37.179573 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:37 crc kubenswrapper[4757]: E1006 13:40:37.179759 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:37 crc kubenswrapper[4757]: E1006 13:40:37.300590 4757 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 06 13:40:38 crc kubenswrapper[4757]: I1006 13:40:38.179513 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:38 crc kubenswrapper[4757]: E1006 13:40:38.179715 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:38 crc kubenswrapper[4757]: I1006 13:40:38.180033 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:38 crc kubenswrapper[4757]: E1006 13:40:38.180225 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:38 crc kubenswrapper[4757]: I1006 13:40:38.180431 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:38 crc kubenswrapper[4757]: E1006 13:40:38.180600 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:39 crc kubenswrapper[4757]: I1006 13:40:39.179136 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:39 crc kubenswrapper[4757]: E1006 13:40:39.179321 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:40 crc kubenswrapper[4757]: I1006 13:40:40.179756 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:40 crc kubenswrapper[4757]: I1006 13:40:40.179856 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:40 crc kubenswrapper[4757]: E1006 13:40:40.179922 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:40 crc kubenswrapper[4757]: I1006 13:40:40.179952 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:40 crc kubenswrapper[4757]: E1006 13:40:40.180167 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:40 crc kubenswrapper[4757]: E1006 13:40:40.180214 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:41 crc kubenswrapper[4757]: I1006 13:40:41.179221 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:41 crc kubenswrapper[4757]: E1006 13:40:41.179505 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:41 crc kubenswrapper[4757]: I1006 13:40:41.179817 4757 scope.go:117] "RemoveContainer" containerID="ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9" Oct 06 13:40:41 crc kubenswrapper[4757]: I1006 13:40:41.850332 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/1.log" Oct 06 13:40:41 crc kubenswrapper[4757]: I1006 13:40:41.850702 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9qf7z" event={"ID":"9144d9fd-70d7-4a29-8e6b-c020c611980a","Type":"ContainerStarted","Data":"5fee59d9cfb29bcaa00f0f7e454083411d714d0dcdc7f6fb55333dd7c18d4f4b"} Oct 06 13:40:42 crc kubenswrapper[4757]: I1006 13:40:42.179854 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:42 crc kubenswrapper[4757]: I1006 13:40:42.179913 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:42 crc kubenswrapper[4757]: I1006 13:40:42.179880 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:42 crc kubenswrapper[4757]: E1006 13:40:42.180673 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:42 crc kubenswrapper[4757]: E1006 13:40:42.180847 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:42 crc kubenswrapper[4757]: E1006 13:40:42.181015 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:42 crc kubenswrapper[4757]: E1006 13:40:42.301347 4757 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 06 13:40:43 crc kubenswrapper[4757]: I1006 13:40:43.179539 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:43 crc kubenswrapper[4757]: E1006 13:40:43.179737 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:44 crc kubenswrapper[4757]: I1006 13:40:44.179848 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:44 crc kubenswrapper[4757]: I1006 13:40:44.179861 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:44 crc kubenswrapper[4757]: E1006 13:40:44.180065 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:44 crc kubenswrapper[4757]: E1006 13:40:44.180211 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:44 crc kubenswrapper[4757]: I1006 13:40:44.179861 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:44 crc kubenswrapper[4757]: E1006 13:40:44.180360 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:45 crc kubenswrapper[4757]: I1006 13:40:45.179063 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:45 crc kubenswrapper[4757]: E1006 13:40:45.179323 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:46 crc kubenswrapper[4757]: I1006 13:40:46.180004 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:46 crc kubenswrapper[4757]: I1006 13:40:46.180068 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:46 crc kubenswrapper[4757]: E1006 13:40:46.180268 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 06 13:40:46 crc kubenswrapper[4757]: I1006 13:40:46.180396 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:46 crc kubenswrapper[4757]: E1006 13:40:46.180665 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-sc9qx" podUID="8a0a24d2-8946-4710-91f2-cc59ecedb5e3" Oct 06 13:40:46 crc kubenswrapper[4757]: E1006 13:40:46.180733 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 06 13:40:47 crc kubenswrapper[4757]: I1006 13:40:47.179392 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:47 crc kubenswrapper[4757]: E1006 13:40:47.179586 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 06 13:40:48 crc kubenswrapper[4757]: I1006 13:40:48.179557 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:40:48 crc kubenswrapper[4757]: I1006 13:40:48.179647 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 06 13:40:48 crc kubenswrapper[4757]: I1006 13:40:48.180082 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:40:48 crc kubenswrapper[4757]: I1006 13:40:48.183152 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 06 13:40:48 crc kubenswrapper[4757]: I1006 13:40:48.183304 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 06 13:40:48 crc kubenswrapper[4757]: I1006 13:40:48.183780 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 06 13:40:48 crc kubenswrapper[4757]: I1006 13:40:48.183805 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 06 13:40:48 crc kubenswrapper[4757]: I1006 13:40:48.183938 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 06 13:40:48 crc kubenswrapper[4757]: I1006 13:40:48.184120 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 06 13:40:49 crc kubenswrapper[4757]: I1006 13:40:49.179788 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.157528 4757 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.209564 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ssqcq"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.210314 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.213582 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hm8qr"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.214863 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.217433 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.217871 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.218251 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.218576 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.218869 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.222809 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.223575 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.223887 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.223976 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.224214 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.225365 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.227059 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.227313 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.227586 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.230782 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.238307 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.240449 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.241138 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.248982 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7724900e-8239-400e-92a8-686e0c85f223-encryption-config\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249045 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-config\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249078 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7724900e-8239-400e-92a8-686e0c85f223-audit-dir\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249219 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-image-import-ca\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249333 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7724900e-8239-400e-92a8-686e0c85f223-etcd-client\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249367 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249374 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-serving-cert\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249482 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249525 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7724900e-8239-400e-92a8-686e0c85f223-node-pullsecrets\") pod \"apiserver-76f77b778f-hm8qr\" (UID: 
\"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249559 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-etcd-serving-ca\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249591 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249621 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-config\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249712 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c78v\" (UniqueName: \"kubernetes.io/projected/7724900e-8239-400e-92a8-686e0c85f223-kube-api-access-4c78v\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249744 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-client-ca\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249803 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n28tn\" (UniqueName: \"kubernetes.io/projected/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-kube-api-access-n28tn\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249850 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7724900e-8239-400e-92a8-686e0c85f223-serving-cert\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.249884 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-audit\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.250743 4757 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.250912 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.250994 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.251431 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.251487 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.252384 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-kpprd"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.252850 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-kpprd"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.253827 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.254888 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.257485 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.258029 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-d2rg2"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.265541 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-d2rg2"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.272115 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.273611 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.274310 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.291807 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.292388 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"
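The "Caches populated for *v1.ConfigMap from object-..." records mark a reflector finishing its initial LIST for a secret or configmap that a just-admitted pod references: from then on the kubelet reads the object from a local, watch-updated cache rather than from the API server. A minimal client-go sketch of the same reflector/informer mechanism, assuming an out-of-cluster kubeconfig (the namespace and object name are taken from the log above; the resync period is illustrative):

package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Illustrative: use rest.InClusterConfig() when running inside a pod.
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// One shared informer factory scoped to a namespace seen in the log.
	factory := informers.NewSharedInformerFactoryWithOptions(
		cs, 10*time.Minute, informers.WithNamespace("openshift-apiserver"))
	cmInformer := factory.Core().V1().ConfigMaps().Informer()

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)

	// "Caches populated" corresponds to the reflector's initial LIST completing.
	if !cache.WaitForCacheSync(stop, cmInformer.HasSynced) {
		panic("cache never synced")
	}
	cm, err := factory.Core().V1().ConfigMaps().Lister().
		ConfigMaps("openshift-apiserver").Get("config")
	if err != nil {
		panic(err)
	}
	fmt.Println("got from local cache:", cm.Name)
}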
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.293014 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.293381 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.295663 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.296221 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vtmh7"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.297163 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.302963 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.303010 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.303169 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.303334 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.303898 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.303969 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.304086 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.304466 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.305076 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.305815 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.306244 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-xbn64"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.306587 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.306964 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-m4xmh"] Oct 06 13:40:51 
crc kubenswrapper[4757]: I1006 13:40:51.307269 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zcwgr"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.307686 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.307794 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.307974 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.308133 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.308171 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.308367 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.308386 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.308128 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.313707 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.314881 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.317803 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.318049 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.318182 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.318257 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.318343 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.318434 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.318620 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 
13:40:51.318711 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.318764 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.318832 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.318891 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319160 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319293 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319296 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319312 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319377 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319383 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319653 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5drtn"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319681 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319444 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319765 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319852 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319914 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319922 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.319942 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.320017 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.323533 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.323753 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-99wzw"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.324522 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.325956 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.327752 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.327884 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zt9z4"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.328593 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.329694 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.330748 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.331568 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.337138 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-mtmpm"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.355115 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.355769 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.356967 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.361841 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.362008 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-mtmpm"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.362586 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck"
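The "SyncLoop ADD" and, later, "SyncLoop UPDATE" records are the kubelet's sync loop receiving pod configuration from the API server (source="api"). A hedged sketch, not the kubelet's actual sync loop, of how the same ADD/UPDATE event stream can be observed with a plain client-go watch (kubeconfig loading as in the previous sketch):

package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/watch"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// Watch pods in all namespaces; the kubelet additionally filters on its own node.
	w, err := cs.CoreV1().Pods(metav1.NamespaceAll).Watch(context.TODO(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	defer w.Stop()

	for ev := range w.ResultChan() {
		pod, ok := ev.Object.(*corev1.Pod)
		if !ok {
			continue
		}
		switch ev.Type {
		case watch.Added:
			fmt.Printf("SyncLoop ADD %s/%s\n", pod.Namespace, pod.Name)
		case watch.Modified:
			fmt.Printf("SyncLoop UPDATE %s/%s\n", pod.Namespace, pod.Name)
		}
	}
}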
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.363008 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-image-import-ca\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.363073 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7724900e-8239-400e-92a8-686e0c85f223-etcd-client\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.363135 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjktj\" (UniqueName: \"kubernetes.io/projected/561f71de-be19-49ac-a44d-cf527ed72cb6-kube-api-access-hjktj\") pod \"openshift-controller-manager-operator-756b6f6bc6-pd4tr\" (UID: \"561f71de-be19-49ac-a44d-cf527ed72cb6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.363384 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-serving-cert\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.363656 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db5c86e6-9e3c-42e8-b816-0dc876fef80e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-4bz5f\" (UID: \"db5c86e6-9e3c-42e8-b816-0dc876fef80e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.363946 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db5c86e6-9e3c-42e8-b816-0dc876fef80e-config\") pod \"kube-controller-manager-operator-78b949d7b-4bz5f\" (UID: \"db5c86e6-9e3c-42e8-b816-0dc876fef80e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.373444 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-oauth-serving-cert\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.373505 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.373534 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4845c7dd-e037-41f4-914b-bef0afffaad6-metrics-certs\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.373551 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a650bded-eee2-45d7-a734-09077ffcafd3-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sh6ck\" (UID: \"a650bded-eee2-45d7-a734-09077ffcafd3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.373571 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/66e3315c-fd06-44fa-9a91-9e2e814618c4-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.366390 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.365918 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-image-import-ca\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.366540 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.366762 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.367039 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.367051 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.367159 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.367191 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.368354 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.368576 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.387295 4757 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.388377 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.388485 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.388677 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389023 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389207 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389270 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389481 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389536 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389627 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389813 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7724900e-8239-400e-92a8-686e0c85f223-node-pullsecrets\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389854 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-config\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389890 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66e3315c-fd06-44fa-9a91-9e2e814618c4-serving-cert\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389918 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/561f71de-be19-49ac-a44d-cf527ed72cb6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-pd4tr\" (UID: \"561f71de-be19-49ac-a44d-cf527ed72cb6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389940 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" 
(UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-etcd-serving-ca\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389960 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db5c86e6-9e3c-42e8-b816-0dc876fef80e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-4bz5f\" (UID: \"db5c86e6-9e3c-42e8-b816-0dc876fef80e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389983 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/4845c7dd-e037-41f4-914b-bef0afffaad6-default-certificate\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390002 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm9vn\" (UniqueName: \"kubernetes.io/projected/ead6b56a-18cc-44d4-94bb-4aaecffc945f-kube-api-access-xm9vn\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390017 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/37719f06-ec78-403f-81c3-d67831d1ce01-images\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390035 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390051 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jstcl\" (UniqueName: \"kubernetes.io/projected/0345b748-8161-40b5-bec8-0c36c2d87ea3-kube-api-access-jstcl\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390070 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-config\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390126 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4845c7dd-e037-41f4-914b-bef0afffaad6-service-ca-bundle\") pod 
\"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390150 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a650bded-eee2-45d7-a734-09077ffcafd3-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sh6ck\" (UID: \"a650bded-eee2-45d7-a734-09077ffcafd3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390164 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-serving-cert\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390173 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-serving-cert\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390194 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/66e3315c-fd06-44fa-9a91-9e2e814618c4-etcd-client\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390238 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c78v\" (UniqueName: \"kubernetes.io/projected/7724900e-8239-400e-92a8-686e0c85f223-kube-api-access-4c78v\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390264 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-client-ca\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390288 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n28tn\" (UniqueName: \"kubernetes.io/projected/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-kube-api-access-n28tn\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390315 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ead6b56a-18cc-44d4-94bb-4aaecffc945f-trusted-ca\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: 
I1006 13:40:51.390338 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/37719f06-ec78-403f-81c3-d67831d1ce01-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390372 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7724900e-8239-400e-92a8-686e0c85f223-serving-cert\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390395 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs7xj\" (UniqueName: \"kubernetes.io/projected/4845c7dd-e037-41f4-914b-bef0afffaad6-kube-api-access-qs7xj\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390426 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-audit\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390447 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ead6b56a-18cc-44d4-94bb-4aaecffc945f-config\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390472 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fsn9\" (UniqueName: \"kubernetes.io/projected/37719f06-ec78-403f-81c3-d67831d1ce01-kube-api-access-5fsn9\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390499 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37719f06-ec78-403f-81c3-d67831d1ce01-config\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390531 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/66e3315c-fd06-44fa-9a91-9e2e814618c4-audit-policies\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390551 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbc5w\" (UniqueName: 
\"kubernetes.io/projected/66e3315c-fd06-44fa-9a91-9e2e814618c4-kube-api-access-cbc5w\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390566 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/66e3315c-fd06-44fa-9a91-9e2e814618c4-audit-dir\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390583 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/561f71de-be19-49ac-a44d-cf527ed72cb6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-pd4tr\" (UID: \"561f71de-be19-49ac-a44d-cf527ed72cb6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390598 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a650bded-eee2-45d7-a734-09077ffcafd3-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sh6ck\" (UID: \"a650bded-eee2-45d7-a734-09077ffcafd3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390614 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66e3315c-fd06-44fa-9a91-9e2e814618c4-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390632 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7724900e-8239-400e-92a8-686e0c85f223-encryption-config\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390648 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/4845c7dd-e037-41f4-914b-bef0afffaad6-stats-auth\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390662 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/66e3315c-fd06-44fa-9a91-9e2e814618c4-encryption-config\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390682 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-oauth-config\") pod 
\"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390699 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-config\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390714 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7724900e-8239-400e-92a8-686e0c85f223-audit-dir\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390730 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ead6b56a-18cc-44d4-94bb-4aaecffc945f-serving-cert\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390754 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-service-ca\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390769 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-trusted-ca-bundle\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390906 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7724900e-8239-400e-92a8-686e0c85f223-node-pullsecrets\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390611 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7724900e-8239-400e-92a8-686e0c85f223-etcd-client\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389860 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389877 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.389581 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390263 4757 reflector.go:368] Caches populated for *v1.ConfigMap 
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390299 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390441 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390534 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390594 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.390669 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.391987 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7724900e-8239-400e-92a8-686e0c85f223-audit-dir\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.392035 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.392132 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.392199 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.392948 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.393420 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.394084 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-config\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.394132 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.394583 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-audit\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.394920 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-etcd-serving-ca\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.395158 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.395182 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-client-ca\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.395244 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7724900e-8239-400e-92a8-686e0c85f223-trusted-ca-bundle\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.395835 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-config\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.397446 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.397702 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.397928 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.398378 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.399148 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.399697 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tddsx"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.399819 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.399944 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.400626 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7724900e-8239-400e-92a8-686e0c85f223-encryption-config\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.400765 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.400946 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.401009 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.401159 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.401377 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.401609 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.403128 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-f5469"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.403679 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.404154 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.404166 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.404201 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.404992 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.405834 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7724900e-8239-400e-92a8-686e0c85f223-serving-cert\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.413993 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.414126 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.415296 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-75h8j"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.415612 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.415687 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.415625 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.415966 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.416303 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.416491 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.418264 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.418380 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ssqcq"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.419837 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.421186 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gklxq"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.421580 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gklxq" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.421581 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.421841 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.422390 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.422906 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hm8qr"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.426649 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.432063 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-d2rg2"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.433540 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.436314 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.436976 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xbn64"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.441031 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.443037 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.445182 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vtmh7"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.446726 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.449731 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-m4xmh"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.451156 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.451903 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.454917 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.458397 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zcwgr"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.469151 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5"] Oct 
06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.470932 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.472327 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rvzds"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.472309 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.473464 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-kpprd"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.473567 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rvzds" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.474466 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-f5469"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.475526 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5drtn"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.476508 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tddsx"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.477543 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-75h8j"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.478548 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.479544 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.480660 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.481519 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zt9z4"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.482563 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.484043 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.484584 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.485572 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.488176 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.488204 4757 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-99wzw"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.488617 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.490705 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.491809 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db5c86e6-9e3c-42e8-b816-0dc876fef80e-config\") pod \"kube-controller-manager-operator-78b949d7b-4bz5f\" (UID: \"db5c86e6-9e3c-42e8-b816-0dc876fef80e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.492023 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-oauth-serving-cert\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.492886 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-oauth-serving-cert\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.492994 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.493123 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rvzds"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.493518 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03f79109-aa85-47fe-9ebb-f14f313aa7f6-serving-cert\") pod \"openshift-config-operator-7777fb866f-qg4qm\" (UID: \"03f79109-aa85-47fe-9ebb-f14f313aa7f6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.493801 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4845c7dd-e037-41f4-914b-bef0afffaad6-metrics-certs\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.493962 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a650bded-eee2-45d7-a734-09077ffcafd3-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sh6ck\" (UID: \"a650bded-eee2-45d7-a734-09077ffcafd3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.494161 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" 
(UniqueName: \"kubernetes.io/configmap/66e3315c-fd06-44fa-9a91-9e2e814618c4-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.494368 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-config\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.494553 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/66e3315c-fd06-44fa-9a91-9e2e814618c4-serving-cert\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.494750 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/561f71de-be19-49ac-a44d-cf527ed72cb6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-pd4tr\" (UID: \"561f71de-be19-49ac-a44d-cf527ed72cb6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.494859 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db5c86e6-9e3c-42e8-b816-0dc876fef80e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-4bz5f\" (UID: \"db5c86e6-9e3c-42e8-b816-0dc876fef80e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.494971 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10780426-2bc4-42e8-924d-2716489d3b83-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9c42f\" (UID: \"10780426-2bc4-42e8-924d-2716489d3b83\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495074 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/4845c7dd-e037-41f4-914b-bef0afffaad6-default-certificate\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495205 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10780426-2bc4-42e8-924d-2716489d3b83-config\") pod \"kube-apiserver-operator-766d6c64bb-9c42f\" (UID: \"10780426-2bc4-42e8-924d-2716489d3b83\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495330 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm9vn\" (UniqueName: \"kubernetes.io/projected/ead6b56a-18cc-44d4-94bb-4aaecffc945f-kube-api-access-xm9vn\") pod 
\"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495425 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/37719f06-ec78-403f-81c3-d67831d1ce01-images\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495511 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jstcl\" (UniqueName: \"kubernetes.io/projected/0345b748-8161-40b5-bec8-0c36c2d87ea3-kube-api-access-jstcl\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495593 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4845c7dd-e037-41f4-914b-bef0afffaad6-service-ca-bundle\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495661 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"] Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495113 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/66e3315c-fd06-44fa-9a91-9e2e814618c4-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495008 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-config\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495671 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a650bded-eee2-45d7-a734-09077ffcafd3-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sh6ck\" (UID: \"a650bded-eee2-45d7-a734-09077ffcafd3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495884 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-serving-cert\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495924 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/66e3315c-fd06-44fa-9a91-9e2e814618c4-etcd-client\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.495994 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ead6b56a-18cc-44d4-94bb-4aaecffc945f-trusted-ca\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496021 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/37719f06-ec78-403f-81c3-d67831d1ce01-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496072 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs7xj\" (UniqueName: \"kubernetes.io/projected/4845c7dd-e037-41f4-914b-bef0afffaad6-kube-api-access-qs7xj\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496109 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/37719f06-ec78-403f-81c3-d67831d1ce01-images\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496117 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ead6b56a-18cc-44d4-94bb-4aaecffc945f-config\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496191 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fsn9\" (UniqueName: \"kubernetes.io/projected/37719f06-ec78-403f-81c3-d67831d1ce01-kube-api-access-5fsn9\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496215 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37719f06-ec78-403f-81c3-d67831d1ce01-config\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496244 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/66e3315c-fd06-44fa-9a91-9e2e814618c4-audit-policies\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496263 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbc5w\" (UniqueName: 
\"kubernetes.io/projected/66e3315c-fd06-44fa-9a91-9e2e814618c4-kube-api-access-cbc5w\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496280 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/66e3315c-fd06-44fa-9a91-9e2e814618c4-audit-dir\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496303 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/561f71de-be19-49ac-a44d-cf527ed72cb6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-pd4tr\" (UID: \"561f71de-be19-49ac-a44d-cf527ed72cb6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496494 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a650bded-eee2-45d7-a734-09077ffcafd3-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sh6ck\" (UID: \"a650bded-eee2-45d7-a734-09077ffcafd3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496520 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66e3315c-fd06-44fa-9a91-9e2e814618c4-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496536 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/66e3315c-fd06-44fa-9a91-9e2e814618c4-audit-dir\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496547 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10780426-2bc4-42e8-924d-2716489d3b83-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9c42f\" (UID: \"10780426-2bc4-42e8-924d-2716489d3b83\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496631 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/03f79109-aa85-47fe-9ebb-f14f313aa7f6-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qg4qm\" (UID: \"03f79109-aa85-47fe-9ebb-f14f313aa7f6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496701 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/4845c7dd-e037-41f4-914b-bef0afffaad6-stats-auth\") pod \"router-default-5444994796-mtmpm\" (UID: 
\"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496726 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/66e3315c-fd06-44fa-9a91-9e2e814618c4-encryption-config\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496759 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-oauth-config\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496788 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ead6b56a-18cc-44d4-94bb-4aaecffc945f-serving-cert\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496829 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-service-ca\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496855 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-trusted-ca-bundle\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496898 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjktj\" (UniqueName: \"kubernetes.io/projected/561f71de-be19-49ac-a44d-cf527ed72cb6-kube-api-access-hjktj\") pod \"openshift-controller-manager-operator-756b6f6bc6-pd4tr\" (UID: \"561f71de-be19-49ac-a44d-cf527ed72cb6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496924 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7pfn\" (UniqueName: \"kubernetes.io/projected/03f79109-aa85-47fe-9ebb-f14f313aa7f6-kube-api-access-c7pfn\") pod \"openshift-config-operator-7777fb866f-qg4qm\" (UID: \"03f79109-aa85-47fe-9ebb-f14f313aa7f6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.496953 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db5c86e6-9e3c-42e8-b816-0dc876fef80e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-4bz5f\" (UID: \"db5c86e6-9e3c-42e8-b816-0dc876fef80e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.497244 4757 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/561f71de-be19-49ac-a44d-cf527ed72cb6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-pd4tr\" (UID: \"561f71de-be19-49ac-a44d-cf527ed72cb6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.497469 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/66e3315c-fd06-44fa-9a91-9e2e814618c4-audit-policies\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.497608 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/66e3315c-fd06-44fa-9a91-9e2e814618c4-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.497673 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ead6b56a-18cc-44d4-94bb-4aaecffc945f-config\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.497762 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37719f06-ec78-403f-81c3-d67831d1ce01-config\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.498353 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-service-ca\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.499199 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-trusted-ca-bundle\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.499392 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/66e3315c-fd06-44fa-9a91-9e2e814618c4-etcd-client\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.499599 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ead6b56a-18cc-44d4-94bb-4aaecffc945f-trusted-ca\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.500429 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-serving-cert\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.501519 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-oauth-config\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.501689 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.501723 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ead6b56a-18cc-44d4-94bb-4aaecffc945f-serving-cert\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.503954 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-wdjlv"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.504883 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-wdjlv"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.507725 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-2vt8x"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.508370 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2vt8x"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.508989 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gklxq"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.511198 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-wdjlv"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.513520 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.515456 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/66e3315c-fd06-44fa-9a91-9e2e814618c4-encryption-config\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.515938 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2vt8x"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.517713 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6cwk6"]
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.518814 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6cwk6"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.519389 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/37719f06-ec78-403f-81c3-d67831d1ce01-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.520588 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/561f71de-be19-49ac-a44d-cf527ed72cb6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-pd4tr\" (UID: \"561f71de-be19-49ac-a44d-cf527ed72cb6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.534360 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.557392 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.573654 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.593827 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.597881 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10780426-2bc4-42e8-924d-2716489d3b83-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9c42f\" (UID: \"10780426-2bc4-42e8-924d-2716489d3b83\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.597967 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10780426-2bc4-42e8-924d-2716489d3b83-config\") pod \"kube-apiserver-operator-766d6c64bb-9c42f\" (UID: \"10780426-2bc4-42e8-924d-2716489d3b83\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.598212 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10780426-2bc4-42e8-924d-2716489d3b83-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9c42f\" (UID: \"10780426-2bc4-42e8-924d-2716489d3b83\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.598244 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/03f79109-aa85-47fe-9ebb-f14f313aa7f6-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qg4qm\" (UID: \"03f79109-aa85-47fe-9ebb-f14f313aa7f6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.598374 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7pfn\" (UniqueName: \"kubernetes.io/projected/03f79109-aa85-47fe-9ebb-f14f313aa7f6-kube-api-access-c7pfn\") pod \"openshift-config-operator-7777fb866f-qg4qm\" (UID: \"03f79109-aa85-47fe-9ebb-f14f313aa7f6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.598422 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03f79109-aa85-47fe-9ebb-f14f313aa7f6-serving-cert\") pod \"openshift-config-operator-7777fb866f-qg4qm\" (UID: \"03f79109-aa85-47fe-9ebb-f14f313aa7f6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.598792 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/03f79109-aa85-47fe-9ebb-f14f313aa7f6-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qg4qm\" (UID: \"03f79109-aa85-47fe-9ebb-f14f313aa7f6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.603500 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03f79109-aa85-47fe-9ebb-f14f313aa7f6-serving-cert\") pod \"openshift-config-operator-7777fb866f-qg4qm\" (UID: \"03f79109-aa85-47fe-9ebb-f14f313aa7f6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.612233 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.633692 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.652906 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.674397 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.681892 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10780426-2bc4-42e8-924d-2716489d3b83-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9c42f\" (UID: \"10780426-2bc4-42e8-924d-2716489d3b83\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.693670 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.712974 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.719157 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10780426-2bc4-42e8-924d-2716489d3b83-config\") pod \"kube-apiserver-operator-766d6c64bb-9c42f\" (UID: \"10780426-2bc4-42e8-924d-2716489d3b83\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.732453 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.753489 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.772702 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.793294 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.813406 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.844699 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.853421 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.873280 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.893653 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.912750 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.934035 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.954892 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.973050 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.983778 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/db5c86e6-9e3c-42e8-b816-0dc876fef80e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-4bz5f\" (UID: \"db5c86e6-9e3c-42e8-b816-0dc876fef80e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f"
Oct 06 13:40:51 crc kubenswrapper[4757]: I1006 13:40:51.992520 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.002906 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db5c86e6-9e3c-42e8-b816-0dc876fef80e-config\") pod \"kube-controller-manager-operator-78b949d7b-4bz5f\" (UID: \"db5c86e6-9e3c-42e8-b816-0dc876fef80e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.013369 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.022293 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/4845c7dd-e037-41f4-914b-bef0afffaad6-stats-auth\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.033430 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.039080 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4845c7dd-e037-41f4-914b-bef0afffaad6-metrics-certs\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.056998 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.073210 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.080964 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/4845c7dd-e037-41f4-914b-bef0afffaad6-default-certificate\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.093262 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.113349 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.132557 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.137970 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4845c7dd-e037-41f4-914b-bef0afffaad6-service-ca-bundle\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.173495 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.178836 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a650bded-eee2-45d7-a734-09077ffcafd3-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sh6ck\" (UID: \"a650bded-eee2-45d7-a734-09077ffcafd3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.216033 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n28tn\" (UniqueName: \"kubernetes.io/projected/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-kube-api-access-n28tn\") pod \"controller-manager-879f6c89f-ssqcq\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.233430 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.237703 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c78v\" (UniqueName: \"kubernetes.io/projected/7724900e-8239-400e-92a8-686e0c85f223-kube-api-access-4c78v\") pod \"apiserver-76f77b778f-hm8qr\" (UID: \"7724900e-8239-400e-92a8-686e0c85f223\") " pod="openshift-apiserver/apiserver-76f77b778f-hm8qr"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.252656 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.273299 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.279299 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a650bded-eee2-45d7-a734-09077ffcafd3-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sh6ck\" (UID: \"a650bded-eee2-45d7-a734-09077ffcafd3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck"
Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.312850 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.334172 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.353437 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.373956 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.393835 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.411467 4757 request.go:700] Waited for 1.010500555s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-multus/secrets?fieldSelector=metadata.name%3Dmultus-admission-controller-secret&limit=500&resourceVersion=0 Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.415147 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.433080 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.439647 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.448235 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.453703 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.472586 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.493684 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.512709 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.533286 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.554680 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.573198 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.594669 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.612804 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.633104 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.653195 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.673720 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.675181 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ssqcq"] Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.684157 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-hm8qr"] Oct 06 13:40:52 crc kubenswrapper[4757]: W1006 13:40:52.684484 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d4a2fb3_0d5a_4be4_afe7_acc6e04917f9.slice/crio-fd13a70234db778f77fab891672d2261960df715fb7f8f6893ee58e865d76e02 WatchSource:0}: Error finding container fd13a70234db778f77fab891672d2261960df715fb7f8f6893ee58e865d76e02: Status 404 returned error can't find the container with id fd13a70234db778f77fab891672d2261960df715fb7f8f6893ee58e865d76e02 Oct 06 13:40:52 crc kubenswrapper[4757]: W1006 13:40:52.689561 4757 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7724900e_8239_400e_92a8_686e0c85f223.slice/crio-722c34416ff1e942e3afb39bc637e52869d332306a0eda471a16788305fb24e9 WatchSource:0}: Error finding container 722c34416ff1e942e3afb39bc637e52869d332306a0eda471a16788305fb24e9: Status 404 returned error can't find the container with id 722c34416ff1e942e3afb39bc637e52869d332306a0eda471a16788305fb24e9 Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.699592 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.713052 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.732151 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.757732 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.772630 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.792511 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.813200 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.833034 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.853532 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.872523 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.891914 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" event={"ID":"7724900e-8239-400e-92a8-686e0c85f223","Type":"ContainerStarted","Data":"68a4d3f9d5971d03c3c9a8007bf6d7c25dbec8e246edb9f032acf1942803d416"} Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.892039 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" event={"ID":"7724900e-8239-400e-92a8-686e0c85f223","Type":"ContainerStarted","Data":"722c34416ff1e942e3afb39bc637e52869d332306a0eda471a16788305fb24e9"} Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.892864 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.893183 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" event={"ID":"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9","Type":"ContainerStarted","Data":"376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d"} Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.893239 4757 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" event={"ID":"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9","Type":"ContainerStarted","Data":"fd13a70234db778f77fab891672d2261960df715fb7f8f6893ee58e865d76e02"} Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.893591 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.895630 4757 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-ssqcq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.895673 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" podUID="3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.913283 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.933940 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.953336 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.973522 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 06 13:40:52 crc kubenswrapper[4757]: I1006 13:40:52.992899 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.012216 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.033239 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.053086 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.072906 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.093831 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.113656 4757 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.133734 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.169285 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db5c86e6-9e3c-42e8-b816-0dc876fef80e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-4bz5f\" (UID: \"db5c86e6-9e3c-42e8-b816-0dc876fef80e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.187004 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm9vn\" (UniqueName: \"kubernetes.io/projected/ead6b56a-18cc-44d4-94bb-4aaecffc945f-kube-api-access-xm9vn\") pod \"console-operator-58897d9998-d2rg2\" (UID: \"ead6b56a-18cc-44d4-94bb-4aaecffc945f\") " pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.206666 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jstcl\" (UniqueName: \"kubernetes.io/projected/0345b748-8161-40b5-bec8-0c36c2d87ea3-kube-api-access-jstcl\") pod \"console-f9d7485db-xbn64\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.225920 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbc5w\" (UniqueName: \"kubernetes.io/projected/66e3315c-fd06-44fa-9a91-9e2e814618c4-kube-api-access-cbc5w\") pod \"apiserver-7bbb656c7d-bgct2\" (UID: \"66e3315c-fd06-44fa-9a91-9e2e814618c4\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.236481 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.251754 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fsn9\" (UniqueName: \"kubernetes.io/projected/37719f06-ec78-403f-81c3-d67831d1ce01-kube-api-access-5fsn9\") pod \"machine-api-operator-5694c8668f-vtmh7\" (UID: \"37719f06-ec78-403f-81c3-d67831d1ce01\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.268929 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs7xj\" (UniqueName: \"kubernetes.io/projected/4845c7dd-e037-41f4-914b-bef0afffaad6-kube-api-access-qs7xj\") pod \"router-default-5444994796-mtmpm\" (UID: \"4845c7dd-e037-41f4-914b-bef0afffaad6\") " pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.280836 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.287133 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a650bded-eee2-45d7-a734-09077ffcafd3-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-sh6ck\" (UID: \"a650bded-eee2-45d7-a734-09077ffcafd3\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.288268 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.312635 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.325464 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjktj\" (UniqueName: \"kubernetes.io/projected/561f71de-be19-49ac-a44d-cf527ed72cb6-kube-api-access-hjktj\") pod \"openshift-controller-manager-operator-756b6f6bc6-pd4tr\" (UID: \"561f71de-be19-49ac-a44d-cf527ed72cb6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.332998 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.336426 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.354245 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.374575 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.393165 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.401727 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.415219 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.421406 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.430832 4757 request.go:700] Waited for 1.921986253s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-canary/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.433074 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.435430 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-d2rg2"] Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.454722 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.480104 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.493919 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.494769 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xbn64"] Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.504085 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.526296 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.534784 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10780426-2bc4-42e8-924d-2716489d3b83-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9c42f\" (UID: \"10780426-2bc4-42e8-924d-2716489d3b83\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f" Oct 06 13:40:53 crc kubenswrapper[4757]: W1006 13:40:53.535340 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0345b748_8161_40b5_bec8_0c36c2d87ea3.slice/crio-807b2c6022525d0d7a6521e60ee56f1fe8bb13b2d8f7d3e9233850f5e1ccc2d8 WatchSource:0}: Error finding container 807b2c6022525d0d7a6521e60ee56f1fe8bb13b2d8f7d3e9233850f5e1ccc2d8: Status 404 returned error can't find the container with id 807b2c6022525d0d7a6521e60ee56f1fe8bb13b2d8f7d3e9233850f5e1ccc2d8 Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.558010 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7pfn\" (UniqueName: \"kubernetes.io/projected/03f79109-aa85-47fe-9ebb-f14f313aa7f6-kube-api-access-c7pfn\") pod \"openshift-config-operator-7777fb866f-qg4qm\" (UID: \"03f79109-aa85-47fe-9ebb-f14f313aa7f6\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.560675 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vtmh7"] Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.573527 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" Oct 06 13:40:53 crc kubenswrapper[4757]: W1006 13:40:53.582946 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4845c7dd_e037_41f4_914b_bef0afffaad6.slice/crio-4b3b5179ded0d1e5bb1176a42e24868b265d42c0f48d579d513f5f7e2e963bef WatchSource:0}: Error finding container 4b3b5179ded0d1e5bb1176a42e24868b265d42c0f48d579d513f5f7e2e963bef: Status 404 returned error can't find the container with id 4b3b5179ded0d1e5bb1176a42e24868b265d42c0f48d579d513f5f7e2e963bef Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.599871 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2"] Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.627443 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/285b4432-8af1-4a51-8361-4a2266326d71-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gmv42\" (UID: \"285b4432-8af1-4a51-8361-4a2266326d71\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.627483 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56a6b92e-fa3b-4944-b9a3-512abeb1892d-config\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.627503 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2431e0a3-d977-4757-a8ed-7382ee29a08b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.627524 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-client-ca\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.627541 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.627558 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-etcd-ca\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 
13:40:53.627572 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.627926 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-config\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.627968 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkq76\" (UniqueName: \"kubernetes.io/projected/2431e0a3-d977-4757-a8ed-7382ee29a08b-kube-api-access-wkq76\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.627988 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7kbs\" (UniqueName: \"kubernetes.io/projected/74caa6de-7695-4dcb-9daf-f3368905de1c-kube-api-access-p7kbs\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628027 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fjb9\" (UniqueName: \"kubernetes.io/projected/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-kube-api-access-5fjb9\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628048 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e86ac358-5c66-416e-a42d-5429b2d57c86-metrics-tls\") pod \"dns-operator-744455d44c-zt9z4\" (UID: \"e86ac358-5c66-416e-a42d-5429b2d57c86\") " pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628067 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-748rs\" (UniqueName: \"kubernetes.io/projected/d4bf9975-f58d-4419-864d-88b28d436c56-kube-api-access-748rs\") pod \"cluster-samples-operator-665b6dd947-jmp6b\" (UID: \"d4bf9975-f58d-4419-864d-88b28d436c56\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628108 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a34da98-aa71-44c5-841b-1209779dadf4-serving-cert\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn" Oct 06 
13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628125 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628163 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-bound-sa-token\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628177 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-config\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628263 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628289 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzh7w\" (UniqueName: \"kubernetes.io/projected/e86ac358-5c66-416e-a42d-5429b2d57c86-kube-api-access-gzh7w\") pod \"dns-operator-744455d44c-zt9z4\" (UID: \"e86ac358-5c66-416e-a42d-5429b2d57c86\") " pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628373 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-certificates\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628393 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2431e0a3-d977-4757-a8ed-7382ee29a08b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628414 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a34da98-aa71-44c5-841b-1209779dadf4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628434 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s478x\" (UniqueName: \"kubernetes.io/projected/56a6b92e-fa3b-4944-b9a3-512abeb1892d-kube-api-access-s478x\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628450 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8v4bh\" (UniqueName: \"kubernetes.io/projected/2a34da98-aa71-44c5-841b-1209779dadf4-kube-api-access-8v4bh\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628466 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/285b4432-8af1-4a51-8361-4a2266326d71-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gmv42\" (UID: \"285b4432-8af1-4a51-8361-4a2266326d71\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628481 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-etcd-client\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628497 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hk45v\" (UniqueName: \"kubernetes.io/projected/2ddf9932-df5d-40b6-88f8-ced01d618903-kube-api-access-hk45v\") pod \"downloads-7954f5f757-kpprd\" (UID: \"2ddf9932-df5d-40b6-88f8-ced01d618903\") " pod="openshift-console/downloads-7954f5f757-kpprd" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628531 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqhfs\" (UniqueName: \"kubernetes.io/projected/f3eed275-7acc-408b-8973-84d9c0fa817b-kube-api-access-dqhfs\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628558 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-tls\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628599 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/56a6b92e-fa3b-4944-b9a3-512abeb1892d-machine-approver-tls\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628617 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvnrm\" (UniqueName: \"kubernetes.io/projected/8db12020-2fce-47b5-936e-e792b08976f0-kube-api-access-dvnrm\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628677 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-audit-policies\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628699 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628739 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2431e0a3-d977-4757-a8ed-7382ee29a08b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628763 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-ca-trust-extracted\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628780 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f3eed275-7acc-408b-8973-84d9c0fa817b-trusted-ca\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628820 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628838 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-installation-pull-secrets\") pod \"image-registry-697d97f7c8-99wzw\" (UID: 
\"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628868 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-trusted-ca\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628886 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74caa6de-7695-4dcb-9daf-f3368905de1c-serving-cert\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628904 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8db12020-2fce-47b5-936e-e792b08976f0-audit-dir\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628933 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/56a6b92e-fa3b-4944-b9a3-512abeb1892d-auth-proxy-config\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628952 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-etcd-service-ca\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628975 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.628997 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f3eed275-7acc-408b-8973-84d9c0fa817b-metrics-tls\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629017 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f3eed275-7acc-408b-8973-84d9c0fa817b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629036 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a34da98-aa71-44c5-841b-1209779dadf4-config\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629055 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-serving-cert\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629075 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a34da98-aa71-44c5-841b-1209779dadf4-service-ca-bundle\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629113 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629142 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4bf9975-f58d-4419-864d-88b28d436c56-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jmp6b\" (UID: \"d4bf9975-f58d-4419-864d-88b28d436c56\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629236 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629289 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629320 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629366 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629395 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmd8g\" (UniqueName: \"kubernetes.io/projected/285b4432-8af1-4a51-8361-4a2266326d71-kube-api-access-mmd8g\") pod \"openshift-apiserver-operator-796bbdcf4f-gmv42\" (UID: \"285b4432-8af1-4a51-8361-4a2266326d71\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42" Oct 06 13:40:53 crc kubenswrapper[4757]: E1006 13:40:53.629402 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.129390086 +0000 UTC m=+142.626708723 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629428 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcj5f\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-kube-api-access-gcj5f\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.629528 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs8r6\" (UniqueName: \"kubernetes.io/projected/a303cc7b-bff3-4fdd-8106-440b19c277d1-kube-api-access-bs8r6\") pod \"migrator-59844c95c7-w7rv5\" (UID: \"a303cc7b-bff3-4fdd-8106-440b19c277d1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5" Oct 06 13:40:53 crc kubenswrapper[4757]: W1006 13:40:53.633287 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66e3315c_fd06_44fa_9a91_9e2e814618c4.slice/crio-ae4a081760117a69958f4dddc81372b24f8043eafa78a8c97c131b0ef0df03fe WatchSource:0}: Error finding container ae4a081760117a69958f4dddc81372b24f8043eafa78a8c97c131b0ef0df03fe: Status 404 returned error can't find the container with id ae4a081760117a69958f4dddc81372b24f8043eafa78a8c97c131b0ef0df03fe Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.680830 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.690591 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f"] Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.711599 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck"] Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.730209 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.730482 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcj5f\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-kube-api-access-gcj5f\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.730510 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs8r6\" (UniqueName: \"kubernetes.io/projected/a303cc7b-bff3-4fdd-8106-440b19c277d1-kube-api-access-bs8r6\") pod \"migrator-59844c95c7-w7rv5\" (UID: \"a303cc7b-bff3-4fdd-8106-440b19c277d1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.730540 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0bf03ffd-be54-4071-ab97-013b5aa51a1b-images\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.730563 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b93821f-dfad-44a5-a217-eb63987c1f0a-secret-volume\") pod \"collect-profiles-29329290-rk7ds\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.730624 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ade88e7a-7f26-4fcb-b6fa-143a76a39783-node-bootstrap-token\") pod \"machine-config-server-6cwk6\" (UID: \"ade88e7a-7f26-4fcb-b6fa-143a76a39783\") " pod="openshift-machine-config-operator/machine-config-server-6cwk6" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.730658 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/285b4432-8af1-4a51-8361-4a2266326d71-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gmv42\" (UID: \"285b4432-8af1-4a51-8361-4a2266326d71\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42" Oct 06 13:40:53 crc 
kubenswrapper[4757]: I1006 13:40:53.731640 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90e799d2-898f-43e4-a937-940d2970d74e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fcpcg\" (UID: \"90e799d2-898f-43e4-a937-940d2970d74e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731685 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56a6b92e-fa3b-4944-b9a3-512abeb1892d-config\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731708 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aa703301-7d55-4ee0-b118-61e91935f2de-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-89rvd\" (UID: \"aa703301-7d55-4ee0-b118-61e91935f2de\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731737 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2431e0a3-d977-4757-a8ed-7382ee29a08b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731759 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8c0d7122-aeb4-4292-a8fc-61cee9bf303c-signing-cabundle\") pod \"service-ca-9c57cc56f-gklxq\" (UID: \"8c0d7122-aeb4-4292-a8fc-61cee9bf303c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gklxq"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731779 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ade88e7a-7f26-4fcb-b6fa-143a76a39783-certs\") pod \"machine-config-server-6cwk6\" (UID: \"ade88e7a-7f26-4fcb-b6fa-143a76a39783\") " pod="openshift-machine-config-operator/machine-config-server-6cwk6"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731806 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-client-ca\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731827 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731851 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-etcd-ca\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731875 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65b9c584-73e5-4414-bc72-556aabda7064-config-volume\") pod \"dns-default-wdjlv\" (UID: \"65b9c584-73e5-4414-bc72-556aabda7064\") " pod="openshift-dns/dns-default-wdjlv"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.731898 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjmbj\" (UniqueName: \"kubernetes.io/projected/26023e16-6f2a-416b-8ccd-b1ee21bd0a57-kube-api-access-xjmbj\") pod \"ingress-canary-2vt8x\" (UID: \"26023e16-6f2a-416b-8ccd-b1ee21bd0a57\") " pod="openshift-ingress-canary/ingress-canary-2vt8x"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.733048 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56a6b92e-fa3b-4944-b9a3-512abeb1892d-config\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.733158 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-etcd-ca\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.733191 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-client-ca\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736264 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736352 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxq9d\" (UniqueName: \"kubernetes.io/projected/65b9c584-73e5-4414-bc72-556aabda7064-kube-api-access-pxq9d\") pod \"dns-default-wdjlv\" (UID: \"65b9c584-73e5-4414-bc72-556aabda7064\") " pod="openshift-dns/dns-default-wdjlv"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736431 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4f8647a0-accb-4139-8720-290260f22bb0-profile-collector-cert\") pod \"olm-operator-6b444d44fb-wzttx\" (UID: \"4f8647a0-accb-4139-8720-290260f22bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"
Oct 06 13:40:53 crc kubenswrapper[4757]: E1006 13:40:53.736462 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.236435479 +0000 UTC m=+142.733754016 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736500 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/26023e16-6f2a-416b-8ccd-b1ee21bd0a57-cert\") pod \"ingress-canary-2vt8x\" (UID: \"26023e16-6f2a-416b-8ccd-b1ee21bd0a57\") " pod="openshift-ingress-canary/ingress-canary-2vt8x"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736529 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/10a2c372-b600-4c78-a4d5-22f5f8c1e425-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bg97k\" (UID: \"10a2c372-b600-4c78-a4d5-22f5f8c1e425\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736576 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-config\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736598 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkq76\" (UniqueName: \"kubernetes.io/projected/2431e0a3-d977-4757-a8ed-7382ee29a08b-kube-api-access-wkq76\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736641 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8k2f\" (UniqueName: \"kubernetes.io/projected/57071541-5943-4293-ba92-c9fa3dc6ec00-kube-api-access-p8k2f\") pod \"multus-admission-controller-857f4d67dd-tddsx\" (UID: \"57071541-5943-4293-ba92-c9fa3dc6ec00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736684 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7kbs\" (UniqueName: \"kubernetes.io/projected/74caa6de-7695-4dcb-9daf-f3368905de1c-kube-api-access-p7kbs\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736738 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fjb9\" (UniqueName: \"kubernetes.io/projected/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-kube-api-access-5fjb9\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736889 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/57071541-5943-4293-ba92-c9fa3dc6ec00-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tddsx\" (UID: \"57071541-5943-4293-ba92-c9fa3dc6ec00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736916 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0bf03ffd-be54-4071-ab97-013b5aa51a1b-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736962 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e86ac358-5c66-416e-a42d-5429b2d57c86-metrics-tls\") pod \"dns-operator-744455d44c-zt9z4\" (UID: \"e86ac358-5c66-416e-a42d-5429b2d57c86\") " pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.736988 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-socket-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.737012 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b5167c6e-fb82-4a79-a743-aed0c32e8f9a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x8wd5\" (UID: \"b5167c6e-fb82-4a79-a743-aed0c32e8f9a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.737047 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-748rs\" (UniqueName: \"kubernetes.io/projected/d4bf9975-f58d-4419-864d-88b28d436c56-kube-api-access-748rs\") pod \"cluster-samples-operator-665b6dd947-jmp6b\" (UID: \"d4bf9975-f58d-4419-864d-88b28d436c56\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.737193 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phh7f\" (UniqueName: \"kubernetes.io/projected/aa703301-7d55-4ee0-b118-61e91935f2de-kube-api-access-phh7f\") pod \"machine-config-controller-84d6567774-89rvd\" (UID: \"aa703301-7d55-4ee0-b118-61e91935f2de\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.737239 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a34da98-aa71-44c5-841b-1209779dadf4-serving-cert\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.737387 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.737407 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90e799d2-898f-43e4-a937-940d2970d74e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fcpcg\" (UID: \"90e799d2-898f-43e4-a937-940d2970d74e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.737424 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2rtm\" (UniqueName: \"kubernetes.io/projected/b9804fd5-588e-4638-b1ff-f815e7b5f834-kube-api-access-s2rtm\") pod \"marketplace-operator-79b997595-f5469\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " pod="openshift-marketplace/marketplace-operator-79b997595-f5469"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.737440 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb86r\" (UniqueName: \"kubernetes.io/projected/ade88e7a-7f26-4fcb-b6fa-143a76a39783-kube-api-access-hb86r\") pod \"machine-config-server-6cwk6\" (UID: \"ade88e7a-7f26-4fcb-b6fa-143a76a39783\") " pod="openshift-machine-config-operator/machine-config-server-6cwk6"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.738830 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d9c90b-be15-4ac9-b19c-6d69db82e58f-config\") pod \"service-ca-operator-777779d784-75h8j\" (UID: \"e5d9c90b-be15-4ac9-b19c-6d69db82e58f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.738862 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0fc6af18-7b97-413d-98f6-df292aaf7e49-apiservice-cert\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.739449 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2431e0a3-d977-4757-a8ed-7382ee29a08b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.739581 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-certificates\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.739775 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-bound-sa-token\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.739813 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-config\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.739884 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.739910 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzh7w\" (UniqueName: \"kubernetes.io/projected/e86ac358-5c66-416e-a42d-5429b2d57c86-kube-api-access-gzh7w\") pod \"dns-operator-744455d44c-zt9z4\" (UID: \"e86ac358-5c66-416e-a42d-5429b2d57c86\") " pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.740011 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/51673d40-dc08-42c2-87ee-0256c42c66df-profile-collector-cert\") pod \"catalog-operator-68c6474976-rdbhp\" (UID: \"51673d40-dc08-42c2-87ee-0256c42c66df\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.740170 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a34da98-aa71-44c5-841b-1209779dadf4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.740201 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2431e0a3-d977-4757-a8ed-7382ee29a08b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.740841 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.741207 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-config\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.742696 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s478x\" (UniqueName: \"kubernetes.io/projected/56a6b92e-fa3b-4944-b9a3-512abeb1892d-kube-api-access-s478x\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.742725 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8v4bh\" (UniqueName: \"kubernetes.io/projected/2a34da98-aa71-44c5-841b-1209779dadf4-kube-api-access-8v4bh\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.742879 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/285b4432-8af1-4a51-8361-4a2266326d71-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gmv42\" (UID: \"285b4432-8af1-4a51-8361-4a2266326d71\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.743072 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2431e0a3-d977-4757-a8ed-7382ee29a08b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.743277 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.743850 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49tq4\" (UniqueName: \"kubernetes.io/projected/0bf03ffd-be54-4071-ab97-013b5aa51a1b-kube-api-access-49tq4\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.743898 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/51673d40-dc08-42c2-87ee-0256c42c66df-srv-cert\") pod \"catalog-operator-68c6474976-rdbhp\" (UID: \"51673d40-dc08-42c2-87ee-0256c42c66df\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.743930 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rfgh\" (UniqueName: \"kubernetes.io/projected/10a2c372-b600-4c78-a4d5-22f5f8c1e425-kube-api-access-2rfgh\") pod \"control-plane-machine-set-operator-78cbb6b69f-bg97k\" (UID: \"10a2c372-b600-4c78-a4d5-22f5f8c1e425\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.744006 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-etcd-client\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.744038 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hk45v\" (UniqueName: \"kubernetes.io/projected/2ddf9932-df5d-40b6-88f8-ced01d618903-kube-api-access-hk45v\") pod \"downloads-7954f5f757-kpprd\" (UID: \"2ddf9932-df5d-40b6-88f8-ced01d618903\") " pod="openshift-console/downloads-7954f5f757-kpprd"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.744038 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/285b4432-8af1-4a51-8361-4a2266326d71-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gmv42\" (UID: \"285b4432-8af1-4a51-8361-4a2266326d71\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.744225 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gfb4\" (UniqueName: \"kubernetes.io/projected/51673d40-dc08-42c2-87ee-0256c42c66df-kube-api-access-6gfb4\") pod \"catalog-operator-68c6474976-rdbhp\" (UID: \"51673d40-dc08-42c2-87ee-0256c42c66df\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.744988 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqhfs\" (UniqueName: \"kubernetes.io/projected/f3eed275-7acc-408b-8973-84d9c0fa817b-kube-api-access-dqhfs\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745027 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-registration-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745081 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8rdg\" (UniqueName: \"kubernetes.io/projected/0fc6af18-7b97-413d-98f6-df292aaf7e49-kube-api-access-l8rdg\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745293 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745311 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e86ac358-5c66-416e-a42d-5429b2d57c86-metrics-tls\") pod \"dns-operator-744455d44c-zt9z4\" (UID: \"e86ac358-5c66-416e-a42d-5429b2d57c86\") " pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745363 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-tls\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745428 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/56a6b92e-fa3b-4944-b9a3-512abeb1892d-machine-approver-tls\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745460 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvnrm\" (UniqueName: \"kubernetes.io/projected/8db12020-2fce-47b5-936e-e792b08976f0-kube-api-access-dvnrm\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745503 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0bf03ffd-be54-4071-ab97-013b5aa51a1b-proxy-tls\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745549 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4f8647a0-accb-4139-8720-290260f22bb0-srv-cert\") pod \"olm-operator-6b444d44fb-wzttx\" (UID: \"4f8647a0-accb-4139-8720-290260f22bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745690 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zblk\" (UniqueName: \"kubernetes.io/projected/c11f3757-611f-48f6-a4c4-909f4f45ccdf-kube-api-access-4zblk\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745901 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8c0d7122-aeb4-4292-a8fc-61cee9bf303c-signing-key\") pod \"service-ca-9c57cc56f-gklxq\" (UID: \"8c0d7122-aeb4-4292-a8fc-61cee9bf303c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gklxq"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.745956 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-audit-policies\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.746044 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-f5469\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " pod="openshift-marketplace/marketplace-operator-79b997595-f5469"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.746071 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/0fc6af18-7b97-413d-98f6-df292aaf7e49-tmpfs\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.746134 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq2dh\" (UniqueName: \"kubernetes.io/projected/7b93821f-dfad-44a5-a217-eb63987c1f0a-kube-api-access-fq2dh\") pod \"collect-profiles-29329290-rk7ds\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.746196 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.746223 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvcrs\" (UniqueName: \"kubernetes.io/projected/e5d9c90b-be15-4ac9-b19c-6d69db82e58f-kube-api-access-wvcrs\") pod \"service-ca-operator-777779d784-75h8j\" (UID: \"e5d9c90b-be15-4ac9-b19c-6d69db82e58f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.746514 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-config\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.746654 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-audit-policies\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.746902 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a34da98-aa71-44c5-841b-1209779dadf4-serving-cert\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.747264 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2431e0a3-d977-4757-a8ed-7382ee29a08b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.747428 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb9rv\" (UniqueName: \"kubernetes.io/projected/90e799d2-898f-43e4-a937-940d2970d74e-kube-api-access-tb9rv\") pod \"kube-storage-version-migrator-operator-b67b599dd-fcpcg\" (UID: \"90e799d2-898f-43e4-a937-940d2970d74e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.747545 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5d9c90b-be15-4ac9-b19c-6d69db82e58f-serving-cert\") pod \"service-ca-operator-777779d784-75h8j\" (UID: \"e5d9c90b-be15-4ac9-b19c-6d69db82e58f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.748048 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-ca-trust-extracted\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.748258 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f3eed275-7acc-408b-8973-84d9c0fa817b-trusted-ca\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.748285 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v44tk\" (UniqueName: \"kubernetes.io/projected/8c0d7122-aeb4-4292-a8fc-61cee9bf303c-kube-api-access-v44tk\") pod \"service-ca-9c57cc56f-gklxq\" (UID: \"8c0d7122-aeb4-4292-a8fc-61cee9bf303c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gklxq"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.748327 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-mountpoint-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.748558 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.748669 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-installation-pull-secrets\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.748704 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/65b9c584-73e5-4414-bc72-556aabda7064-metrics-tls\") pod \"dns-default-wdjlv\" (UID: \"65b9c584-73e5-4414-bc72-556aabda7064\") " pod="openshift-dns/dns-default-wdjlv"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.748668 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-ca-trust-extracted\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.749006 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-etcd-client\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.749120 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzmzj\" (UniqueName: \"kubernetes.io/projected/b5167c6e-fb82-4a79-a743-aed0c32e8f9a-kube-api-access-wzmzj\") pod \"package-server-manager-789f6589d5-x8wd5\" (UID: \"b5167c6e-fb82-4a79-a743-aed0c32e8f9a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.749203 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-trusted-ca\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.749231 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74caa6de-7695-4dcb-9daf-f3368905de1c-serving-cert\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.749253 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8db12020-2fce-47b5-936e-e792b08976f0-audit-dir\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.749281 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aa703301-7d55-4ee0-b118-61e91935f2de-proxy-tls\") pod \"machine-config-controller-84d6567774-89rvd\" (UID: \"aa703301-7d55-4ee0-b118-61e91935f2de\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.749914 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8db12020-2fce-47b5-936e-e792b08976f0-audit-dir\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.750005 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-certificates\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.750142 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/56a6b92e-fa3b-4944-b9a3-512abeb1892d-auth-proxy-config\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.750277 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-etcd-service-ca\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.750727 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.750796 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f3eed275-7acc-408b-8973-84d9c0fa817b-metrics-tls\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.750860 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-etcd-service-ca\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.750926 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f3eed275-7acc-408b-8973-84d9c0fa817b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.750950 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-f5469\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " pod="openshift-marketplace/marketplace-operator-79b997595-f5469"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.750949 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-tls\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.751085 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-trusted-ca\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.751230 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.751284 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a34da98-aa71-44c5-841b-1209779dadf4-config\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:53 crc kubenswrapper[4757]: E1006 13:40:53.751317 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.251300487 +0000 UTC m=+142.748619214 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.751890 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a34da98-aa71-44c5-841b-1209779dadf4-config\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.751913 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f3eed275-7acc-408b-8973-84d9c0fa817b-trusted-ca\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.752021 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-installation-pull-secrets\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.752158 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/56a6b92e-fa3b-4944-b9a3-512abeb1892d-auth-proxy-config\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.753418 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74caa6de-7695-4dcb-9daf-f3368905de1c-serving-cert\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.754634 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/56a6b92e-fa3b-4944-b9a3-512abeb1892d-machine-approver-tls\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.755960 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a34da98-aa71-44c5-841b-1209779dadf4-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756030 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756312 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/285b4432-8af1-4a51-8361-4a2266326d71-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gmv42\" (UID: \"285b4432-8af1-4a51-8361-4a2266326d71\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756592 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-serving-cert\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756619 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756635 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a34da98-aa71-44c5-841b-1209779dadf4-service-ca-bundle\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756658 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756667 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f3eed275-7acc-408b-8973-84d9c0fa817b-metrics-tls\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756678 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4bf9975-f58d-4419-864d-88b28d436c56-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jmp6b\" (UID: \"d4bf9975-f58d-4419-864d-88b28d436c56\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756699 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0fc6af18-7b97-413d-98f6-df292aaf7e49-webhook-cert\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756718 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b93821f-dfad-44a5-a217-eb63987c1f0a-config-volume\") pod \"collect-profiles-29329290-rk7ds\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756738 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-csi-data-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756757 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756788 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756809 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756826 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-plugins-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756845 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756866 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmd8g\" (UniqueName: \"kubernetes.io/projected/285b4432-8af1-4a51-8361-4a2266326d71-kube-api-access-mmd8g\") pod \"openshift-apiserver-operator-796bbdcf4f-gmv42\" (UID: \"285b4432-8af1-4a51-8361-4a2266326d71\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.756886 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl45c\" (UniqueName: \"kubernetes.io/projected/4f8647a0-accb-4139-8720-290260f22bb0-kube-api-access-gl45c\") pod \"olm-operator-6b444d44fb-wzttx\" (UID: \"4f8647a0-accb-4139-8720-290260f22bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.758688 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.766509 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a34da98-aa71-44c5-841b-1209779dadf4-service-ca-bundle\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.766820 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.767087 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-serving-cert\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.767642 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.767745 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.776362 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4bf9975-f58d-4419-864d-88b28d436c56-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jmp6b\" (UID: \"d4bf9975-f58d-4419-864d-88b28d436c56\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.778588 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.780772 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcj5f\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-kube-api-access-gcj5f\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.810528 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs8r6\" (UniqueName: \"kubernetes.io/projected/a303cc7b-bff3-4fdd-8106-440b19c277d1-kube-api-access-bs8r6\") pod \"migrator-59844c95c7-w7rv5\" (UID: \"a303cc7b-bff3-4fdd-8106-440b19c277d1\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.838374 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkq76\" (UniqueName: \"kubernetes.io/projected/2431e0a3-d977-4757-a8ed-7382ee29a08b-kube-api-access-wkq76\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.851565 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr"]
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.858389 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fjb9\" (UniqueName: \"kubernetes.io/projected/c2bf58ce-8c1e-4f9c-935e-3f61f3282676-kube-api-access-5fjb9\") pod \"etcd-operator-b45778765-m4xmh\" (UID: \"c2bf58ce-8c1e-4f9c-935e-3f61f3282676\") " pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.860407 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:53 crc kubenswrapper[4757]: E1006 13:40:53.860507 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.360463488 +0000 UTC m=+142.857782025 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.860780 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/65b9c584-73e5-4414-bc72-556aabda7064-metrics-tls\") pod \"dns-default-wdjlv\" (UID: \"65b9c584-73e5-4414-bc72-556aabda7064\") " pod="openshift-dns/dns-default-wdjlv"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.860816 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzmzj\" (UniqueName: \"kubernetes.io/projected/b5167c6e-fb82-4a79-a743-aed0c32e8f9a-kube-api-access-wzmzj\") pod \"package-server-manager-789f6589d5-x8wd5\" (UID: \"b5167c6e-fb82-4a79-a743-aed0c32e8f9a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.860847 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aa703301-7d55-4ee0-b118-61e91935f2de-proxy-tls\") pod \"machine-config-controller-84d6567774-89rvd\" (UID: \"aa703301-7d55-4ee0-b118-61e91935f2de\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.860891 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.860946 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-f5469\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " pod="openshift-marketplace/marketplace-operator-79b997595-f5469"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.861035 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0fc6af18-7b97-413d-98f6-df292aaf7e49-webhook-cert\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.861055 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b93821f-dfad-44a5-a217-eb63987c1f0a-config-volume\") pod \"collect-profiles-29329290-rk7ds\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.861075 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-csi-data-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.861130 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-plugins-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.861157 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl45c\" (UniqueName: \"kubernetes.io/projected/4f8647a0-accb-4139-8720-290260f22bb0-kube-api-access-gl45c\") pod \"olm-operator-6b444d44fb-wzttx\" (UID: \"4f8647a0-accb-4139-8720-290260f22bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.861177 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0bf03ffd-be54-4071-ab97-013b5aa51a1b-images\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.861172 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7kbs\" (UniqueName: \"kubernetes.io/projected/74caa6de-7695-4dcb-9daf-f3368905de1c-kube-api-access-p7kbs\") pod \"route-controller-manager-6576b87f9c-np4zr\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.861610 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-plugins-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:53 crc kubenswrapper[4757]: E1006 13:40:53.862504 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.362494652 +0000 UTC m=+142.859813189 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862650 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-csi-data-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.861192 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b93821f-dfad-44a5-a217-eb63987c1f0a-secret-volume\") pod \"collect-profiles-29329290-rk7ds\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862707 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ade88e7a-7f26-4fcb-b6fa-143a76a39783-node-bootstrap-token\") pod \"machine-config-server-6cwk6\" (UID: \"ade88e7a-7f26-4fcb-b6fa-143a76a39783\") " pod="openshift-machine-config-operator/machine-config-server-6cwk6" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862736 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90e799d2-898f-43e4-a937-940d2970d74e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fcpcg\" (UID: \"90e799d2-898f-43e4-a937-940d2970d74e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862763 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aa703301-7d55-4ee0-b118-61e91935f2de-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-89rvd\" (UID: \"aa703301-7d55-4ee0-b118-61e91935f2de\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862788 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8c0d7122-aeb4-4292-a8fc-61cee9bf303c-signing-cabundle\") pod \"service-ca-9c57cc56f-gklxq\" (UID: \"8c0d7122-aeb4-4292-a8fc-61cee9bf303c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gklxq" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862810 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ade88e7a-7f26-4fcb-b6fa-143a76a39783-certs\") pod \"machine-config-server-6cwk6\" (UID: \"ade88e7a-7f26-4fcb-b6fa-143a76a39783\") " pod="openshift-machine-config-operator/machine-config-server-6cwk6" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862831 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/65b9c584-73e5-4414-bc72-556aabda7064-config-volume\") pod \"dns-default-wdjlv\" (UID: \"65b9c584-73e5-4414-bc72-556aabda7064\") " pod="openshift-dns/dns-default-wdjlv" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862854 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjmbj\" (UniqueName: \"kubernetes.io/projected/26023e16-6f2a-416b-8ccd-b1ee21bd0a57-kube-api-access-xjmbj\") pod \"ingress-canary-2vt8x\" (UID: \"26023e16-6f2a-416b-8ccd-b1ee21bd0a57\") " pod="openshift-ingress-canary/ingress-canary-2vt8x" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862881 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4f8647a0-accb-4139-8720-290260f22bb0-profile-collector-cert\") pod \"olm-operator-6b444d44fb-wzttx\" (UID: \"4f8647a0-accb-4139-8720-290260f22bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862925 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/26023e16-6f2a-416b-8ccd-b1ee21bd0a57-cert\") pod \"ingress-canary-2vt8x\" (UID: \"26023e16-6f2a-416b-8ccd-b1ee21bd0a57\") " pod="openshift-ingress-canary/ingress-canary-2vt8x" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862956 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxq9d\" (UniqueName: \"kubernetes.io/projected/65b9c584-73e5-4414-bc72-556aabda7064-kube-api-access-pxq9d\") pod \"dns-default-wdjlv\" (UID: \"65b9c584-73e5-4414-bc72-556aabda7064\") " pod="openshift-dns/dns-default-wdjlv" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.862980 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/10a2c372-b600-4c78-a4d5-22f5f8c1e425-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bg97k\" (UID: \"10a2c372-b600-4c78-a4d5-22f5f8c1e425\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863011 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8k2f\" (UniqueName: \"kubernetes.io/projected/57071541-5943-4293-ba92-c9fa3dc6ec00-kube-api-access-p8k2f\") pod \"multus-admission-controller-857f4d67dd-tddsx\" (UID: \"57071541-5943-4293-ba92-c9fa3dc6ec00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863265 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/57071541-5943-4293-ba92-c9fa3dc6ec00-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tddsx\" (UID: \"57071541-5943-4293-ba92-c9fa3dc6ec00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863290 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0bf03ffd-be54-4071-ab97-013b5aa51a1b-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863324 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-socket-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863346 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b5167c6e-fb82-4a79-a743-aed0c32e8f9a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x8wd5\" (UID: \"b5167c6e-fb82-4a79-a743-aed0c32e8f9a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863399 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phh7f\" (UniqueName: \"kubernetes.io/projected/aa703301-7d55-4ee0-b118-61e91935f2de-kube-api-access-phh7f\") pod \"machine-config-controller-84d6567774-89rvd\" (UID: \"aa703301-7d55-4ee0-b118-61e91935f2de\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863425 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90e799d2-898f-43e4-a937-940d2970d74e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fcpcg\" (UID: \"90e799d2-898f-43e4-a937-940d2970d74e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863450 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2rtm\" (UniqueName: \"kubernetes.io/projected/b9804fd5-588e-4638-b1ff-f815e7b5f834-kube-api-access-s2rtm\") pod \"marketplace-operator-79b997595-f5469\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " pod="openshift-marketplace/marketplace-operator-79b997595-f5469" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863474 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb86r\" (UniqueName: \"kubernetes.io/projected/ade88e7a-7f26-4fcb-b6fa-143a76a39783-kube-api-access-hb86r\") pod \"machine-config-server-6cwk6\" (UID: \"ade88e7a-7f26-4fcb-b6fa-143a76a39783\") " pod="openshift-machine-config-operator/machine-config-server-6cwk6" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863521 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d9c90b-be15-4ac9-b19c-6d69db82e58f-config\") pod \"service-ca-operator-777779d784-75h8j\" (UID: \"e5d9c90b-be15-4ac9-b19c-6d69db82e58f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863544 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0fc6af18-7b97-413d-98f6-df292aaf7e49-apiservice-cert\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" 
Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863572 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0bf03ffd-be54-4071-ab97-013b5aa51a1b-images\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863587 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/51673d40-dc08-42c2-87ee-0256c42c66df-profile-collector-cert\") pod \"catalog-operator-68c6474976-rdbhp\" (UID: \"51673d40-dc08-42c2-87ee-0256c42c66df\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863633 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49tq4\" (UniqueName: \"kubernetes.io/projected/0bf03ffd-be54-4071-ab97-013b5aa51a1b-kube-api-access-49tq4\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863654 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/51673d40-dc08-42c2-87ee-0256c42c66df-srv-cert\") pod \"catalog-operator-68c6474976-rdbhp\" (UID: \"51673d40-dc08-42c2-87ee-0256c42c66df\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863678 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rfgh\" (UniqueName: \"kubernetes.io/projected/10a2c372-b600-4c78-a4d5-22f5f8c1e425-kube-api-access-2rfgh\") pod \"control-plane-machine-set-operator-78cbb6b69f-bg97k\" (UID: \"10a2c372-b600-4c78-a4d5-22f5f8c1e425\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863714 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gfb4\" (UniqueName: \"kubernetes.io/projected/51673d40-dc08-42c2-87ee-0256c42c66df-kube-api-access-6gfb4\") pod \"catalog-operator-68c6474976-rdbhp\" (UID: \"51673d40-dc08-42c2-87ee-0256c42c66df\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863746 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-registration-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863766 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8rdg\" (UniqueName: \"kubernetes.io/projected/0fc6af18-7b97-413d-98f6-df292aaf7e49-kube-api-access-l8rdg\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863786 4757 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0bf03ffd-be54-4071-ab97-013b5aa51a1b-proxy-tls\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863808 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4f8647a0-accb-4139-8720-290260f22bb0-srv-cert\") pod \"olm-operator-6b444d44fb-wzttx\" (UID: \"4f8647a0-accb-4139-8720-290260f22bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863842 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zblk\" (UniqueName: \"kubernetes.io/projected/c11f3757-611f-48f6-a4c4-909f4f45ccdf-kube-api-access-4zblk\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863877 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8c0d7122-aeb4-4292-a8fc-61cee9bf303c-signing-key\") pod \"service-ca-9c57cc56f-gklxq\" (UID: \"8c0d7122-aeb4-4292-a8fc-61cee9bf303c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gklxq" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863900 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-f5469\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " pod="openshift-marketplace/marketplace-operator-79b997595-f5469" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863924 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/0fc6af18-7b97-413d-98f6-df292aaf7e49-tmpfs\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.863944 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq2dh\" (UniqueName: \"kubernetes.io/projected/7b93821f-dfad-44a5-a217-eb63987c1f0a-kube-api-access-fq2dh\") pod \"collect-profiles-29329290-rk7ds\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.864003 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvcrs\" (UniqueName: \"kubernetes.io/projected/e5d9c90b-be15-4ac9-b19c-6d69db82e58f-kube-api-access-wvcrs\") pod \"service-ca-operator-777779d784-75h8j\" (UID: \"e5d9c90b-be15-4ac9-b19c-6d69db82e58f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.864063 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb9rv\" (UniqueName: \"kubernetes.io/projected/90e799d2-898f-43e4-a937-940d2970d74e-kube-api-access-tb9rv\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-fcpcg\" (UID: \"90e799d2-898f-43e4-a937-940d2970d74e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.864115 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5d9c90b-be15-4ac9-b19c-6d69db82e58f-serving-cert\") pod \"service-ca-operator-777779d784-75h8j\" (UID: \"e5d9c90b-be15-4ac9-b19c-6d69db82e58f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.864139 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-mountpoint-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.864168 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v44tk\" (UniqueName: \"kubernetes.io/projected/8c0d7122-aeb4-4292-a8fc-61cee9bf303c-kube-api-access-v44tk\") pod \"service-ca-9c57cc56f-gklxq\" (UID: \"8c0d7122-aeb4-4292-a8fc-61cee9bf303c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gklxq" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.865213 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b93821f-dfad-44a5-a217-eb63987c1f0a-secret-volume\") pod \"collect-profiles-29329290-rk7ds\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.867525 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-f5469\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " pod="openshift-marketplace/marketplace-operator-79b997595-f5469" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.867586 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/aa703301-7d55-4ee0-b118-61e91935f2de-proxy-tls\") pod \"machine-config-controller-84d6567774-89rvd\" (UID: \"aa703301-7d55-4ee0-b118-61e91935f2de\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.871640 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4f8647a0-accb-4139-8720-290260f22bb0-profile-collector-cert\") pod \"olm-operator-6b444d44fb-wzttx\" (UID: \"4f8647a0-accb-4139-8720-290260f22bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.872513 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0fc6af18-7b97-413d-98f6-df292aaf7e49-apiservice-cert\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" Oct 06 13:40:53 crc 
kubenswrapper[4757]: I1006 13:40:53.873771 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-registration-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.874224 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm"] Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.875683 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/10a2c372-b600-4c78-a4d5-22f5f8c1e425-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-bg97k\" (UID: \"10a2c372-b600-4c78-a4d5-22f5f8c1e425\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.876495 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ade88e7a-7f26-4fcb-b6fa-143a76a39783-certs\") pod \"machine-config-server-6cwk6\" (UID: \"ade88e7a-7f26-4fcb-b6fa-143a76a39783\") " pod="openshift-machine-config-operator/machine-config-server-6cwk6" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.878922 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65b9c584-73e5-4414-bc72-556aabda7064-config-volume\") pod \"dns-default-wdjlv\" (UID: \"65b9c584-73e5-4414-bc72-556aabda7064\") " pod="openshift-dns/dns-default-wdjlv" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.879078 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b93821f-dfad-44a5-a217-eb63987c1f0a-config-volume\") pod \"collect-profiles-29329290-rk7ds\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.879721 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/51673d40-dc08-42c2-87ee-0256c42c66df-srv-cert\") pod \"catalog-operator-68c6474976-rdbhp\" (UID: \"51673d40-dc08-42c2-87ee-0256c42c66df\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.880043 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8c0d7122-aeb4-4292-a8fc-61cee9bf303c-signing-cabundle\") pod \"service-ca-9c57cc56f-gklxq\" (UID: \"8c0d7122-aeb4-4292-a8fc-61cee9bf303c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gklxq" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.882676 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/0fc6af18-7b97-413d-98f6-df292aaf7e49-tmpfs\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.882737 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/65b9c584-73e5-4414-bc72-556aabda7064-metrics-tls\") pod \"dns-default-wdjlv\" (UID: \"65b9c584-73e5-4414-bc72-556aabda7064\") " pod="openshift-dns/dns-default-wdjlv" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.883036 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/aa703301-7d55-4ee0-b118-61e91935f2de-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-89rvd\" (UID: \"aa703301-7d55-4ee0-b118-61e91935f2de\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.883133 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-socket-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.884007 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0bf03ffd-be54-4071-ab97-013b5aa51a1b-auth-proxy-config\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.884165 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c11f3757-611f-48f6-a4c4-909f4f45ccdf-mountpoint-dir\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.884216 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5d9c90b-be15-4ac9-b19c-6d69db82e58f-config\") pod \"service-ca-operator-777779d784-75h8j\" (UID: \"e5d9c90b-be15-4ac9-b19c-6d69db82e58f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.885033 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-f5469\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " pod="openshift-marketplace/marketplace-operator-79b997595-f5469" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.885120 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/51673d40-dc08-42c2-87ee-0256c42c66df-profile-collector-cert\") pod \"catalog-operator-68c6474976-rdbhp\" (UID: \"51673d40-dc08-42c2-87ee-0256c42c66df\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.888198 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0bf03ffd-be54-4071-ab97-013b5aa51a1b-proxy-tls\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" Oct 06 13:40:53 crc kubenswrapper[4757]: 
I1006 13:40:53.888412 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-748rs\" (UniqueName: \"kubernetes.io/projected/d4bf9975-f58d-4419-864d-88b28d436c56-kube-api-access-748rs\") pod \"cluster-samples-operator-665b6dd947-jmp6b\" (UID: \"d4bf9975-f58d-4419-864d-88b28d436c56\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.888548 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5d9c90b-be15-4ac9-b19c-6d69db82e58f-serving-cert\") pod \"service-ca-operator-777779d784-75h8j\" (UID: \"e5d9c90b-be15-4ac9-b19c-6d69db82e58f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.888679 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90e799d2-898f-43e4-a937-940d2970d74e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-fcpcg\" (UID: \"90e799d2-898f-43e4-a937-940d2970d74e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.889021 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90e799d2-898f-43e4-a937-940d2970d74e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-fcpcg\" (UID: \"90e799d2-898f-43e4-a937-940d2970d74e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.889815 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/b5167c6e-fb82-4a79-a743-aed0c32e8f9a-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x8wd5\" (UID: \"b5167c6e-fb82-4a79-a743-aed0c32e8f9a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.890146 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/57071541-5943-4293-ba92-c9fa3dc6ec00-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tddsx\" (UID: \"57071541-5943-4293-ba92-c9fa3dc6ec00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.890333 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4f8647a0-accb-4139-8720-290260f22bb0-srv-cert\") pod \"olm-operator-6b444d44fb-wzttx\" (UID: \"4f8647a0-accb-4139-8720-290260f22bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.893506 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/26023e16-6f2a-416b-8ccd-b1ee21bd0a57-cert\") pod \"ingress-canary-2vt8x\" (UID: \"26023e16-6f2a-416b-8ccd-b1ee21bd0a57\") " pod="openshift-ingress-canary/ingress-canary-2vt8x" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.894249 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/8c0d7122-aeb4-4292-a8fc-61cee9bf303c-signing-key\") pod \"service-ca-9c57cc56f-gklxq\" (UID: \"8c0d7122-aeb4-4292-a8fc-61cee9bf303c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gklxq" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.895363 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ade88e7a-7f26-4fcb-b6fa-143a76a39783-node-bootstrap-token\") pod \"machine-config-server-6cwk6\" (UID: \"ade88e7a-7f26-4fcb-b6fa-143a76a39783\") " pod="openshift-machine-config-operator/machine-config-server-6cwk6" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.895490 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0fc6af18-7b97-413d-98f6-df292aaf7e49-webhook-cert\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.903119 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-bound-sa-token\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.903224 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xbn64" event={"ID":"0345b748-8161-40b5-bec8-0c36c2d87ea3","Type":"ContainerStarted","Data":"0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.903267 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xbn64" event={"ID":"0345b748-8161-40b5-bec8-0c36c2d87ea3","Type":"ContainerStarted","Data":"807b2c6022525d0d7a6521e60ee56f1fe8bb13b2d8f7d3e9233850f5e1ccc2d8"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.913814 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzh7w\" (UniqueName: \"kubernetes.io/projected/e86ac358-5c66-416e-a42d-5429b2d57c86-kube-api-access-gzh7w\") pod \"dns-operator-744455d44c-zt9z4\" (UID: \"e86ac358-5c66-416e-a42d-5429b2d57c86\") " pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4" Oct 06 13:40:53 crc kubenswrapper[4757]: W1006 13:40:53.916809 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03f79109_aa85_47fe_9ebb_f14f313aa7f6.slice/crio-015eceeaf367ab029615c5d72f30f4e00d5f2d24b8f3fb4d4484ba8bd549cc38 WatchSource:0}: Error finding container 015eceeaf367ab029615c5d72f30f4e00d5f2d24b8f3fb4d4484ba8bd549cc38: Status 404 returned error can't find the container with id 015eceeaf367ab029615c5d72f30f4e00d5f2d24b8f3fb4d4484ba8bd549cc38 Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.918683 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f"] Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.918800 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.920704 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mtmpm" event={"ID":"4845c7dd-e037-41f4-914b-bef0afffaad6","Type":"ContainerStarted","Data":"4b3b5179ded0d1e5bb1176a42e24868b265d42c0f48d579d513f5f7e2e963bef"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.924624 4757 generic.go:334] "Generic (PLEG): container finished" podID="7724900e-8239-400e-92a8-686e0c85f223" containerID="68a4d3f9d5971d03c3c9a8007bf6d7c25dbec8e246edb9f032acf1942803d416" exitCode=0 Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.924768 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" event={"ID":"7724900e-8239-400e-92a8-686e0c85f223","Type":"ContainerDied","Data":"68a4d3f9d5971d03c3c9a8007bf6d7c25dbec8e246edb9f032acf1942803d416"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.926301 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" event={"ID":"a650bded-eee2-45d7-a734-09077ffcafd3","Type":"ContainerStarted","Data":"9c7c735b63e689a185d52ba434d7fc205094ca46caf010e239e64db056730df2"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.930387 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.932338 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s478x\" (UniqueName: \"kubernetes.io/projected/56a6b92e-fa3b-4944-b9a3-512abeb1892d-kube-api-access-s478x\") pod \"machine-approver-56656f9798-qwn5v\" (UID: \"56a6b92e-fa3b-4944-b9a3-512abeb1892d\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.933272 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f" event={"ID":"db5c86e6-9e3c-42e8-b816-0dc876fef80e","Type":"ContainerStarted","Data":"5d14e531e075468a584fe941c3d9d51f4deee4be9c4e35d16138c9fbf6bf4a57"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.935787 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" event={"ID":"37719f06-ec78-403f-81c3-d67831d1ce01","Type":"ContainerStarted","Data":"bbb35afb318c84329f7c7e403a299fd5d0fa8071ae3eb1ab0743a51205fc653e"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.935828 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" event={"ID":"37719f06-ec78-403f-81c3-d67831d1ce01","Type":"ContainerStarted","Data":"036489767ca34e307790fcaeda37f26cab93bccddeeaedd5420a29cbc37cc168"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.944701 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" event={"ID":"561f71de-be19-49ac-a44d-cf527ed72cb6","Type":"ContainerStarted","Data":"4b1ad9d7dfdbe9e5c5c2e3db11d066d6bac7403aa1be3561cc82bc557460ce1e"} Oct 06 13:40:53 crc kubenswrapper[4757]: W1006 13:40:53.946619 4757 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10780426_2bc4_42e8_924d_2716489d3b83.slice/crio-61f7f8fe053ef0296f430fcd032bdf8a14f24286ea0f402ec5e4d44e75cada83 WatchSource:0}: Error finding container 61f7f8fe053ef0296f430fcd032bdf8a14f24286ea0f402ec5e4d44e75cada83: Status 404 returned error can't find the container with id 61f7f8fe053ef0296f430fcd032bdf8a14f24286ea0f402ec5e4d44e75cada83 Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.948676 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" event={"ID":"66e3315c-fd06-44fa-9a91-9e2e814618c4","Type":"ContainerStarted","Data":"ae4a081760117a69958f4dddc81372b24f8043eafa78a8c97c131b0ef0df03fe"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.950700 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8v4bh\" (UniqueName: \"kubernetes.io/projected/2a34da98-aa71-44c5-841b-1209779dadf4-kube-api-access-8v4bh\") pod \"authentication-operator-69f744f599-5drtn\" (UID: \"2a34da98-aa71-44c5-841b-1209779dadf4\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.954530 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-d2rg2" event={"ID":"ead6b56a-18cc-44d4-94bb-4aaecffc945f","Type":"ContainerStarted","Data":"d473bedbec2475ea2fe741abd642d45ee2451f85bd0e33d39b4a270574ac8aa0"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.954632 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-d2rg2" event={"ID":"ead6b56a-18cc-44d4-94bb-4aaecffc945f","Type":"ContainerStarted","Data":"b643da70c9cd2e16bfc253e6382ae70a87956d5e50d122c0ddcdc3422e56974a"} Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.955003 4757 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-ssqcq container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.955102 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" podUID="3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.964545 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:53 crc kubenswrapper[4757]: E1006 13:40:53.965312 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.46529918 +0000 UTC m=+142.962617717 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.966205 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.966785 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4" Oct 06 13:40:53 crc kubenswrapper[4757]: I1006 13:40:53.989325 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hk45v\" (UniqueName: \"kubernetes.io/projected/2ddf9932-df5d-40b6-88f8-ced01d618903-kube-api-access-hk45v\") pod \"downloads-7954f5f757-kpprd\" (UID: \"2ddf9932-df5d-40b6-88f8-ced01d618903\") " pod="openshift-console/downloads-7954f5f757-kpprd" Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.007172 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqhfs\" (UniqueName: \"kubernetes.io/projected/f3eed275-7acc-408b-8973-84d9c0fa817b-kube-api-access-dqhfs\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8" Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.028211 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-kpprd" Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.031034 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvnrm\" (UniqueName: \"kubernetes.io/projected/8db12020-2fce-47b5-936e-e792b08976f0-kube-api-access-dvnrm\") pod \"oauth-openshift-558db77b4-zcwgr\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.066910 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:54 crc kubenswrapper[4757]: E1006 13:40:54.069572 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.56955867 +0000 UTC m=+143.066877267 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.094671 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f3eed275-7acc-408b-8973-84d9c0fa817b-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nm7z8\" (UID: \"f3eed275-7acc-408b-8973-84d9c0fa817b\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.097606 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2431e0a3-d977-4757-a8ed-7382ee29a08b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-sms26\" (UID: \"2431e0a3-d977-4757-a8ed-7382ee29a08b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.101961 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmd8g\" (UniqueName: \"kubernetes.io/projected/285b4432-8af1-4a51-8361-4a2266326d71-kube-api-access-mmd8g\") pod \"openshift-apiserver-operator-796bbdcf4f-gmv42\" (UID: \"285b4432-8af1-4a51-8361-4a2266326d71\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.115053 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl45c\" (UniqueName: \"kubernetes.io/projected/4f8647a0-accb-4139-8720-290260f22bb0-kube-api-access-gl45c\") pod \"olm-operator-6b444d44fb-wzttx\" (UID: \"4f8647a0-accb-4139-8720-290260f22bb0\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.136940 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzmzj\" (UniqueName: \"kubernetes.io/projected/b5167c6e-fb82-4a79-a743-aed0c32e8f9a-kube-api-access-wzmzj\") pod \"package-server-manager-789f6589d5-x8wd5\" (UID: \"b5167c6e-fb82-4a79-a743-aed0c32e8f9a\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.142606 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.152100 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.153417 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v44tk\" (UniqueName: \"kubernetes.io/projected/8c0d7122-aeb4-4292-a8fc-61cee9bf303c-kube-api-access-v44tk\") pod \"service-ca-9c57cc56f-gklxq\" (UID: \"8c0d7122-aeb4-4292-a8fc-61cee9bf303c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gklxq"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.167577 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:54 crc kubenswrapper[4757]: E1006 13:40:54.168150 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.668133712 +0000 UTC m=+143.165452249 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.176655 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zblk\" (UniqueName: \"kubernetes.io/projected/c11f3757-611f-48f6-a4c4-909f4f45ccdf-kube-api-access-4zblk\") pod \"csi-hostpathplugin-rvzds\" (UID: \"c11f3757-611f-48f6-a4c4-909f4f45ccdf\") " pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.197908 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49tq4\" (UniqueName: \"kubernetes.io/projected/0bf03ffd-be54-4071-ab97-013b5aa51a1b-kube-api-access-49tq4\") pod \"machine-config-operator-74547568cd-fggk5\" (UID: \"0bf03ffd-be54-4071-ab97-013b5aa51a1b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.199498 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.211044 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjmbj\" (UniqueName: \"kubernetes.io/projected/26023e16-6f2a-416b-8ccd-b1ee21bd0a57-kube-api-access-xjmbj\") pod \"ingress-canary-2vt8x\" (UID: \"26023e16-6f2a-416b-8ccd-b1ee21bd0a57\") " pod="openshift-ingress-canary/ingress-canary-2vt8x"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.211576 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5"]
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.224058 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-m4xmh"]
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.233743 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phh7f\" (UniqueName: \"kubernetes.io/projected/aa703301-7d55-4ee0-b118-61e91935f2de-kube-api-access-phh7f\") pod \"machine-config-controller-84d6567774-89rvd\" (UID: \"aa703301-7d55-4ee0-b118-61e91935f2de\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.242414 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42"
Oct 06 13:40:54 crc kubenswrapper[4757]: W1006 13:40:54.242935 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda303cc7b_bff3_4fdd_8106_440b19c277d1.slice/crio-843893c63a8247f7835f6499bf45debc41a79132d6078b045c04c6a507925634 WatchSource:0}: Error finding container 843893c63a8247f7835f6499bf45debc41a79132d6078b045c04c6a507925634: Status 404 returned error can't find the container with id 843893c63a8247f7835f6499bf45debc41a79132d6078b045c04c6a507925634
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.247040 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.250552 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rfgh\" (UniqueName: \"kubernetes.io/projected/10a2c372-b600-4c78-a4d5-22f5f8c1e425-kube-api-access-2rfgh\") pod \"control-plane-machine-set-operator-78cbb6b69f-bg97k\" (UID: \"10a2c372-b600-4c78-a4d5-22f5f8c1e425\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.272818 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:54 crc kubenswrapper[4757]: E1006 13:40:54.273702 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.77368561 +0000 UTC m=+143.271004147 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.277190 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gfb4\" (UniqueName: \"kubernetes.io/projected/51673d40-dc08-42c2-87ee-0256c42c66df-kube-api-access-6gfb4\") pod \"catalog-operator-68c6474976-rdbhp\" (UID: \"51673d40-dc08-42c2-87ee-0256c42c66df\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.298803 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8rdg\" (UniqueName: \"kubernetes.io/projected/0fc6af18-7b97-413d-98f6-df292aaf7e49-kube-api-access-l8rdg\") pod \"packageserver-d55dfcdfc-sbbvc\" (UID: \"0fc6af18-7b97-413d-98f6-df292aaf7e49\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.300072 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-zt9z4"]
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.304389 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.311422 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.315704 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxq9d\" (UniqueName: \"kubernetes.io/projected/65b9c584-73e5-4414-bc72-556aabda7064-kube-api-access-pxq9d\") pod \"dns-default-wdjlv\" (UID: \"65b9c584-73e5-4414-bc72-556aabda7064\") " pod="openshift-dns/dns-default-wdjlv"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.321021 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.331037 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.346307 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvcrs\" (UniqueName: \"kubernetes.io/projected/e5d9c90b-be15-4ac9-b19c-6d69db82e58f-kube-api-access-wvcrs\") pod \"service-ca-operator-777779d784-75h8j\" (UID: \"e5d9c90b-be15-4ac9-b19c-6d69db82e58f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.347001 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.351596 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq2dh\" (UniqueName: \"kubernetes.io/projected/7b93821f-dfad-44a5-a217-eb63987c1f0a-kube-api-access-fq2dh\") pod \"collect-profiles-29329290-rk7ds\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.363309 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-kpprd"]
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.363535 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.373615 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.377313 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.379851 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8k2f\" (UniqueName: \"kubernetes.io/projected/57071541-5943-4293-ba92-c9fa3dc6ec00-kube-api-access-p8k2f\") pod \"multus-admission-controller-857f4d67dd-tddsx\" (UID: \"57071541-5943-4293-ba92-c9fa3dc6ec00\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.381367 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b"]
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.384352 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.392757 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb86r\" (UniqueName: \"kubernetes.io/projected/ade88e7a-7f26-4fcb-b6fa-143a76a39783-kube-api-access-hb86r\") pod \"machine-config-server-6cwk6\" (UID: \"ade88e7a-7f26-4fcb-b6fa-143a76a39783\") " pod="openshift-machine-config-operator/machine-config-server-6cwk6"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.398426 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.399077 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gklxq"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.406980 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.407631 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:54 crc kubenswrapper[4757]: E1006 13:40:54.408016 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:54.907997098 +0000 UTC m=+143.405315635 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.423408 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rvzds"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.427259 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-wdjlv"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.427779 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2rtm\" (UniqueName: \"kubernetes.io/projected/b9804fd5-588e-4638-b1ff-f815e7b5f834-kube-api-access-s2rtm\") pod \"marketplace-operator-79b997595-f5469\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " pod="openshift-marketplace/marketplace-operator-79b997595-f5469"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.429173 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb9rv\" (UniqueName: \"kubernetes.io/projected/90e799d2-898f-43e4-a937-940d2970d74e-kube-api-access-tb9rv\") pod \"kube-storage-version-migrator-operator-b67b599dd-fcpcg\" (UID: \"90e799d2-898f-43e4-a937-940d2970d74e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.433464 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-2vt8x"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.441250 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6cwk6"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.453953 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"]
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.508643 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:54 crc kubenswrapper[4757]: E1006 13:40:54.509079 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.009062731 +0000 UTC m=+143.506381268 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.616449 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:54 crc kubenswrapper[4757]: E1006 13:40:54.617504 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.117490376 +0000 UTC m=+143.614808903 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.623517 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zcwgr"]
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.638019 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.641649 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.657333 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-f5469"
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.717927 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-5drtn"]
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.718530 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:54 crc kubenswrapper[4757]: E1006 13:40:54.718908 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.21889612 +0000 UTC m=+143.716214657 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.821995 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:54 crc kubenswrapper[4757]: E1006 13:40:54.822801 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.322782027 +0000 UTC m=+143.820100564 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.924695 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:54 crc kubenswrapper[4757]: E1006 13:40:54.925020 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.425006074 +0000 UTC m=+143.922324611 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.962159 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4" event={"ID":"e86ac358-5c66-416e-a42d-5429b2d57c86","Type":"ContainerStarted","Data":"78f55adb21172ba2326a0bfa29969cf4cbb120efe4bf6aed01b7d6908e5d06eb"}
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.963376 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f" event={"ID":"10780426-2bc4-42e8-924d-2716489d3b83","Type":"ContainerStarted","Data":"0c40e1446926eefe22fdc2f1e55175d74110f8c112946d964ec2f39e5f600108"}
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.963434 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f" event={"ID":"10780426-2bc4-42e8-924d-2716489d3b83","Type":"ContainerStarted","Data":"61f7f8fe053ef0296f430fcd032bdf8a14f24286ea0f402ec5e4d44e75cada83"}
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.985752 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" event={"ID":"c2bf58ce-8c1e-4f9c-935e-3f61f3282676","Type":"ContainerStarted","Data":"05bfb57c0dea0ee2848ea41663b1fcaa220306ae16db49847ba0c373250ccc02"}
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.990368 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-mtmpm" event={"ID":"4845c7dd-e037-41f4-914b-bef0afffaad6","Type":"ContainerStarted","Data":"1d3c32d6fe77cee681189f55de0815bffcf0c1116aff9d1986231a26794270c8"}
Oct 06 13:40:54 crc kubenswrapper[4757]: I1006 13:40:54.997551 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" event={"ID":"7724900e-8239-400e-92a8-686e0c85f223","Type":"ContainerStarted","Data":"0203d1d1359c39ae36e3a064fdcd7eaca41646dc9d46db959ecc8d012f7518ac"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:54.999781 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5" event={"ID":"a303cc7b-bff3-4fdd-8106-440b19c277d1","Type":"ContainerStarted","Data":"843893c63a8247f7835f6499bf45debc41a79132d6078b045c04c6a507925634"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.006266 4757 generic.go:334] "Generic (PLEG): container finished" podID="03f79109-aa85-47fe-9ebb-f14f313aa7f6" containerID="a710d8c205a82d22191a809266cc17e689bd30bc347b9cb04158e96ef76e3594" exitCode=0
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.006354 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" event={"ID":"03f79109-aa85-47fe-9ebb-f14f313aa7f6","Type":"ContainerDied","Data":"a710d8c205a82d22191a809266cc17e689bd30bc347b9cb04158e96ef76e3594"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.006379 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" event={"ID":"03f79109-aa85-47fe-9ebb-f14f313aa7f6","Type":"ContainerStarted","Data":"015eceeaf367ab029615c5d72f30f4e00d5f2d24b8f3fb4d4484ba8bd549cc38"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.008238 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" event={"ID":"8db12020-2fce-47b5-936e-e792b08976f0","Type":"ContainerStarted","Data":"f0a46bdc7f73d5935d4a725d0f8aeec87c27d59eca17f9f84a07fbd8c48f2579"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.019331 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" event={"ID":"74caa6de-7695-4dcb-9daf-f3368905de1c","Type":"ContainerStarted","Data":"805278454ff71deb2bd4d2d2c9d777de1c28e22dee86d0b93a7562ebf5b726d7"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.022325 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-kpprd" event={"ID":"2ddf9932-df5d-40b6-88f8-ced01d618903","Type":"ContainerStarted","Data":"f8fa2e4f8c0b4541ae5ce4ba562ab1e695a4c4728e9357a7aabf447a7fd6d5e2"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.026446 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.027658 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.527623533 +0000 UTC m=+144.024942070 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.036292 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f" event={"ID":"db5c86e6-9e3c-42e8-b816-0dc876fef80e","Type":"ContainerStarted","Data":"87adddedf79182ffddf68ee60bba85d86d2c9aa1f17c5b2a70410be0e1de567d"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.043670 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" event={"ID":"37719f06-ec78-403f-81c3-d67831d1ce01","Type":"ContainerStarted","Data":"7122b1ddfe4aa0ce8c51ef03f5241173d2d5bf79f0559645442f80d1d2ab4b42"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.096904 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8"]
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.099360 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42"]
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.110758 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v" event={"ID":"56a6b92e-fa3b-4944-b9a3-512abeb1892d","Type":"ContainerStarted","Data":"02347bf3c106b1e2e97ea7b2c1526688fbf65d881052270a448959f0f482c637"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.110808 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v" event={"ID":"56a6b92e-fa3b-4944-b9a3-512abeb1892d","Type":"ContainerStarted","Data":"d6b8654a924690faa7208b12f0002142a15209922ac5c9932c1593db31645bad"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.127754 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.128890 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.628877444 +0000 UTC m=+144.126195981 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.142945 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-xbn64" podStartSLOduration=124.142921791 podStartE2EDuration="2m4.142921791s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:55.141402675 +0000 UTC m=+143.638721212" watchObservedRunningTime="2025-10-06 13:40:55.142921791 +0000 UTC m=+143.640240328"
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.146469 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" event={"ID":"a650bded-eee2-45d7-a734-09077ffcafd3","Type":"ContainerStarted","Data":"b2d650c52f809a8413568b07d457446a9355a368337c647ef3c0428b4bc376c0"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.155358 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6cwk6" event={"ID":"ade88e7a-7f26-4fcb-b6fa-143a76a39783","Type":"ContainerStarted","Data":"f79b4fc0cc18a1bdd90a67d5ae2ecc266dde8020c4caaac3365cccde7ccda19b"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.179791 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn" event={"ID":"2a34da98-aa71-44c5-841b-1209779dadf4","Type":"ContainerStarted","Data":"2a701176cd7ec4aed73324502b7f7408db7941f9641165875d80d6470b6a3d24"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.234671 4757 generic.go:334] "Generic (PLEG): container finished" podID="66e3315c-fd06-44fa-9a91-9e2e814618c4" containerID="5b9c4411a59d8aeb611dfc62ac22d131b2af483d3033772ed7521d5639f13c41" exitCode=0
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.234786 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" event={"ID":"66e3315c-fd06-44fa-9a91-9e2e814618c4","Type":"ContainerDied","Data":"5b9c4411a59d8aeb611dfc62ac22d131b2af483d3033772ed7521d5639f13c41"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.236974 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.238183 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.73816829 +0000 UTC m=+144.235486827 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.250035 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd"]
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.262352 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b" event={"ID":"d4bf9975-f58d-4419-864d-88b28d436c56","Type":"ContainerStarted","Data":"3be11a8973f348d64e4317daf45d03825a63db5a2a6e25b6a1d9eca965a09845"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.277556 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" event={"ID":"561f71de-be19-49ac-a44d-cf527ed72cb6","Type":"ContainerStarted","Data":"3a6aa0e58b6d1b33395061dd341a31cdc7638356135a54edfbc580b9016c94e4"}
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.278251 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-d2rg2"
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.300655 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq"
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.303235 4757 patch_prober.go:28] interesting pod/console-operator-58897d9998-d2rg2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body=
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.303293 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-d2rg2" podUID="ead6b56a-18cc-44d4-94bb-4aaecffc945f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused"
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.323293 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" podStartSLOduration=123.323274205 podStartE2EDuration="2m3.323274205s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:55.320941548 +0000 UTC m=+143.818260115" watchObservedRunningTime="2025-10-06 13:40:55.323274205 +0000 UTC m=+143.820592742"
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.341485 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"]
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.345489 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.347280 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.847263519 +0000 UTC m=+144.344582256 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.376358 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5"]
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.446578 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.446887 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:55.946872827 +0000 UTC m=+144.444191364 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.529873 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-mtmpm"
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.548207 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.548516 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.048504942 +0000 UTC m=+144.545823479 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.650566 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.650687 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.150671026 +0000 UTC m=+144.647989563 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.650792 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.651180 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.151172323 +0000 UTC m=+144.648490860 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.680215 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.680275 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.683262 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-d2rg2" podStartSLOduration=124.683245605 podStartE2EDuration="2m4.683245605s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:55.643821413 +0000 UTC m=+144.141139960" watchObservedRunningTime="2025-10-06 13:40:55.683245605 +0000 UTC m=+144.180564142"
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.770524 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.771299 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.271277969 +0000 UTC m=+144.768596506 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.780825 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k"]
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.831854 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26"]
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.866423 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gklxq"]
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.873891 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.874193 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.374181899 +0000 UTC m=+144.871500436 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:55 crc kubenswrapper[4757]: W1006 13:40:55.922761 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c0d7122_aeb4_4292_a8fc_61cee9bf303c.slice/crio-1ec5e5a080840c1acbcd341f0306675cef0055747c71d93b782231294db7f3ed WatchSource:0}: Error finding container 1ec5e5a080840c1acbcd341f0306675cef0055747c71d93b782231294db7f3ed: Status 404 returned error can't find the container with id 1ec5e5a080840c1acbcd341f0306675cef0055747c71d93b782231294db7f3ed
Oct 06 13:40:55 crc kubenswrapper[4757]: I1006 13:40:55.975586 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:55 crc kubenswrapper[4757]: E1006 13:40:55.976239 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.476213207 +0000 UTC m=+144.973531744 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.081340 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.081962 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.581946363 +0000 UTC m=+145.079264900 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.143693 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-vtmh7" podStartSLOduration=124.143670537 podStartE2EDuration="2m4.143670537s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.138332479 +0000 UTC m=+144.635651016" watchObservedRunningTime="2025-10-06 13:40:56.143670537 +0000 UTC m=+144.640989074"
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.159363 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-f5469"]
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.160341 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg"]
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.163205 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"]
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.175162 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tddsx"]
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.182363 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.182720 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.682706675 +0000 UTC m=+145.180025212 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.207570 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-wdjlv"]
Oct 06 13:40:56 crc kubenswrapper[4757]: W1006 13:40:56.224618 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9804fd5_588e_4638_b1ff_f815e7b5f834.slice/crio-b59d41d25f8b0c581e3101c892da4615dd7e1327cf07d841759d5c3c5a98feaf WatchSource:0}: Error finding container b59d41d25f8b0c581e3101c892da4615dd7e1327cf07d841759d5c3c5a98feaf: Status 404 returned error can't find the container with id b59d41d25f8b0c581e3101c892da4615dd7e1327cf07d841759d5c3c5a98feaf
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.228916 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-75h8j"]
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.238553 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-sh6ck" podStartSLOduration=124.238530361 podStartE2EDuration="2m4.238530361s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.207509498 +0000 UTC m=+144.704828055" watchObservedRunningTime="2025-10-06 13:40:56.238530361 +0000 UTC m=+144.735848908"
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.249965 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-4bz5f" podStartSLOduration=124.249943542 podStartE2EDuration="2m4.249943542s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.248197317 +0000 UTC m=+144.745515874" watchObservedRunningTime="2025-10-06 13:40:56.249943542 +0000 UTC m=+144.747262079"
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.284323 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.284625 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.784613858 +0000 UTC m=+145.281932395 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.293628 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pd4tr" podStartSLOduration=125.29361136 podStartE2EDuration="2m5.29361136s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.291313496 +0000 UTC m=+144.788632043" watchObservedRunningTime="2025-10-06 13:40:56.29361136 +0000 UTC m=+144.790929897"
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.325411 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-2vt8x"]
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.361257 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc"]
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.367476 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42" event={"ID":"285b4432-8af1-4a51-8361-4a2266326d71","Type":"ContainerStarted","Data":"84a54f013822f90e1c8df2b8910efc02876e400ce4b6027f3cb85fd2892d32e8"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.369699 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp"]
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.383241 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4" event={"ID":"e86ac358-5c66-416e-a42d-5429b2d57c86","Type":"ContainerStarted","Data":"e48c8df86057d6b3781fb5f2ef175b3d7a17c8f9fdaeb3867ba630a2a2c652de"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.389925 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.398399 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.890086434 +0000 UTC m=+145.387404971 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.398634 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.398936 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:56.898922529 +0000 UTC m=+145.396241066 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:56 crc kubenswrapper[4757]: W1006 13:40:56.404358 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51673d40_dc08_42c2_87ee_0256c42c66df.slice/crio-020a6690b87a5340c56477ab8d713be71860971ee7aad4362fd72c17a27517f4 WatchSource:0}: Error finding container 020a6690b87a5340c56477ab8d713be71860971ee7aad4362fd72c17a27517f4: Status 404 returned error can't find the container with id 020a6690b87a5340c56477ab8d713be71860971ee7aad4362fd72c17a27517f4
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.404533 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5" event={"ID":"a303cc7b-bff3-4fdd-8106-440b19c277d1","Type":"ContainerStarted","Data":"a9e0b5373c58c425db4543d62872f527943aa2c077a3f9501e441f8c061a29ba"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.414805 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9c42f" podStartSLOduration=124.414790474 podStartE2EDuration="2m4.414790474s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.409075874 +0000 UTC m=+144.906394411" watchObservedRunningTime="2025-10-06 13:40:56.414790474 +0000 UTC m=+144.912109011"
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.417723 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5"]
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.417768 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx" event={"ID":"57071541-5943-4293-ba92-c9fa3dc6ec00","Type":"ContainerStarted","Data":"ff0fc50d8276e32660a8178af795b3f4bfb97942a3b04510f32c9355b36871ee"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.418942 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j" event={"ID":"e5d9c90b-be15-4ac9-b19c-6d69db82e58f","Type":"ContainerStarted","Data":"a1c2ca2f1bfe02396a7a026ca58a9b5ccd3b5a24797666f4638696213f6108d2"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.420314 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg" event={"ID":"90e799d2-898f-43e4-a937-940d2970d74e","Type":"ContainerStarted","Data":"ebee3ef3546617261cc7c197374942aa0aa7fd10812ddc55a172e32d70d818cb"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.422902 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26" event={"ID":"2431e0a3-d977-4757-a8ed-7382ee29a08b","Type":"ContainerStarted","Data":"39681c8898a746ddaf7f9f9c095dacadb7a5e3a5b33ecb1ebd755a958f88a91e"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.423802 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" event={"ID":"4f8647a0-accb-4139-8720-290260f22bb0","Type":"ContainerStarted","Data":"53b6930c6b2e5931bb2553ae86ef08d1a772acc4c99b1ab2c8386b7f07527fe4"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.424882 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" event={"ID":"74caa6de-7695-4dcb-9daf-f3368905de1c","Type":"ContainerStarted","Data":"b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.425765 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.426594 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gklxq" event={"ID":"8c0d7122-aeb4-4292-a8fc-61cee9bf303c","Type":"ContainerStarted","Data":"1ec5e5a080840c1acbcd341f0306675cef0055747c71d93b782231294db7f3ed"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.427534 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8" event={"ID":"f3eed275-7acc-408b-8973-84d9c0fa817b","Type":"ContainerStarted","Data":"4c9905f824a62e14fde968a5cb93b07d8c19afea6426810e930c15b5d4fed970"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.427556 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8" event={"ID":"f3eed275-7acc-408b-8973-84d9c0fa817b","Type":"ContainerStarted","Data":"36faea8330566bc39409702b5617bef374e6319e1818282248744bccf3fe8876"}
Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.428958 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" event={"ID":"03f79109-aa85-47fe-9ebb-f14f313aa7f6","Type":"ContainerStarted","Data":"4f63d79b1a51e10d0c7cc62b673387f63b03bcd9d110ed55e57aca676347e7ca"}
Oct 06
13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.429473 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.432505 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" event={"ID":"c2bf58ce-8c1e-4f9c-935e-3f61f3282676","Type":"ContainerStarted","Data":"bf2c22808f77fcb3df98a84f229c41e3f8c34837568fd5ae00a87c28490bb87f"} Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.435979 4757 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-np4zr container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.436021 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" podUID="74caa6de-7695-4dcb-9daf-f3368905de1c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.438975 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" event={"ID":"0bf03ffd-be54-4071-ab97-013b5aa51a1b","Type":"ContainerStarted","Data":"8b3c4692cf631b4cbb55b779a5f393b1ce4c1270b98c75d24a02fb53bcd39730"} Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.445829 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn" event={"ID":"2a34da98-aa71-44c5-841b-1209779dadf4","Type":"ContainerStarted","Data":"97176da7167cb411a9a097fb10ef7670187c85bc3e94b1786f1d7c1d9cd4e584"} Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.462666 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd" event={"ID":"aa703301-7d55-4ee0-b118-61e91935f2de","Type":"ContainerStarted","Data":"23a21f936601f2d0cdace81e92262b64d8ca8abd3b623e7c72450f89248b667f"} Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.466588 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k" event={"ID":"10a2c372-b600-4c78-a4d5-22f5f8c1e425","Type":"ContainerStarted","Data":"4394538db14f4e955cdf66b6e16158657c3c5ae492ece73823eef11e41e0500c"} Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.470452 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rvzds"] Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.476828 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-kpprd" event={"ID":"2ddf9932-df5d-40b6-88f8-ced01d618903","Type":"ContainerStarted","Data":"0cc588d9015e54caf31c0943b399cade06a5d4898d312556a62fb003dab1ad05"} Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.477676 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-kpprd" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.492227 4757 patch_prober.go:28] interesting pod/downloads-7954f5f757-kpprd 
container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.492271 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-kpprd" podUID="2ddf9932-df5d-40b6-88f8-ced01d618903" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.494370 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" event={"ID":"b9804fd5-588e-4638-b1ff-f815e7b5f834","Type":"ContainerStarted","Data":"b59d41d25f8b0c581e3101c892da4615dd7e1327cf07d841759d5c3c5a98feaf"} Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.499268 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6cwk6" event={"ID":"ade88e7a-7f26-4fcb-b6fa-143a76a39783","Type":"ContainerStarted","Data":"14ac692cbbf58387bcd6b8909734ea0a2778619944506e132dba06d760f6aa01"} Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.500312 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.501512 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.001474388 +0000 UTC m=+145.498792925 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.502015 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.506547 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" event={"ID":"7b93821f-dfad-44a5-a217-eb63987c1f0a","Type":"ContainerStarted","Data":"6d2521b2e9923c3b41dc9454fc3f0355f688fc85c011c6dd6614362cc7cc3031"} Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.507596 4757 patch_prober.go:28] interesting pod/console-operator-58897d9998-d2rg2 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.507651 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-d2rg2" podUID="ead6b56a-18cc-44d4-94bb-4aaecffc945f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/readyz\": dial tcp 10.217.0.10:8443: connect: connection refused" Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.517760 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.017739506 +0000 UTC m=+145.515058043 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.532521 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-mtmpm" podStartSLOduration=124.532502811 podStartE2EDuration="2m4.532502811s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.530815608 +0000 UTC m=+145.028134155" watchObservedRunningTime="2025-10-06 13:40:56.532502811 +0000 UTC m=+145.029821348" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.565541 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:40:56 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:40:56 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:40:56 crc kubenswrapper[4757]: healthz check failed Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.565614 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.605817 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.606004 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.105979507 +0000 UTC m=+145.603298044 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.610289 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.610835 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.110823586 +0000 UTC m=+145.608142123 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.648126 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6cwk6" podStartSLOduration=5.648109989 podStartE2EDuration="5.648109989s" podCreationTimestamp="2025-10-06 13:40:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.647160294 +0000 UTC m=+145.144478831" watchObservedRunningTime="2025-10-06 13:40:56.648109989 +0000 UTC m=+145.145428526" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.648980 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" podStartSLOduration=125.648971741 podStartE2EDuration="2m5.648971741s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.613538196 +0000 UTC m=+145.110856733" watchObservedRunningTime="2025-10-06 13:40:56.648971741 +0000 UTC m=+145.146290278" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.692823 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" podStartSLOduration=124.692804635 podStartE2EDuration="2m4.692804635s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.690774061 +0000 UTC m=+145.188092598" watchObservedRunningTime="2025-10-06 13:40:56.692804635 +0000 UTC m=+145.190123172" Oct 06 13:40:56 crc 
kubenswrapper[4757]: I1006 13:40:56.722678 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.722989 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.222974057 +0000 UTC m=+145.720292594 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.808263 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-kpprd" podStartSLOduration=125.808239888 podStartE2EDuration="2m5.808239888s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.750180539 +0000 UTC m=+145.247499086" watchObservedRunningTime="2025-10-06 13:40:56.808239888 +0000 UTC m=+145.305558425" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.809372 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-m4xmh" podStartSLOduration=124.809361699 podStartE2EDuration="2m4.809361699s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.807083706 +0000 UTC m=+145.304402253" watchObservedRunningTime="2025-10-06 13:40:56.809361699 +0000 UTC m=+145.306680236" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.824331 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.824659 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.324646722 +0000 UTC m=+145.821965259 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.847650 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-5drtn" podStartSLOduration=125.847623149 podStartE2EDuration="2m5.847623149s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:56.845807112 +0000 UTC m=+145.343125639" watchObservedRunningTime="2025-10-06 13:40:56.847623149 +0000 UTC m=+145.344941686" Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.926213 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.926364 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.426336848 +0000 UTC m=+145.923655385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:56 crc kubenswrapper[4757]: I1006 13:40:56.926437 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:56 crc kubenswrapper[4757]: E1006 13:40:56.926945 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.426939241 +0000 UTC m=+145.924257778 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.028141 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.028661 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.528641767 +0000 UTC m=+146.025960314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.129660 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.130261 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.63024357 +0000 UTC m=+146.127562107 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.231250 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.231705 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.731688707 +0000 UTC m=+146.229007244 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.340482 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.342501 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.842479438 +0000 UTC m=+146.339797975 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.450276 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.450682 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:57.950662354 +0000 UTC m=+146.447980891 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.528624 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5" event={"ID":"a303cc7b-bff3-4fdd-8106-440b19c277d1","Type":"ContainerStarted","Data":"bd934991ba78b63d52cedd53df9753601d5e0baec2d94a38f823cc31692e8dbf"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.536998 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:40:57 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:40:57 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:40:57 crc kubenswrapper[4757]: healthz check failed Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.537042 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.560312 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-w7rv5" podStartSLOduration=125.560294762 podStartE2EDuration="2m5.560294762s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.5583517 +0000 UTC m=+146.055670237" watchObservedRunningTime="2025-10-06 13:40:57.560294762 +0000 UTC m=+146.057613299" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.560982 4757 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.561431 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.061410323 +0000 UTC m=+146.558728920 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.564161 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42" event={"ID":"285b4432-8af1-4a51-8361-4a2266326d71","Type":"ContainerStarted","Data":"810da8896761728562925e97a55a85b9b530e33bcf02540ec6a22cab1fe29157"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.566645 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" event={"ID":"7b93821f-dfad-44a5-a217-eb63987c1f0a","Type":"ContainerStarted","Data":"c9cdccf51ed3ffabdec5ba8c55257e4cd788195638213e89980e5436c5f7d857"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.606531 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v" event={"ID":"56a6b92e-fa3b-4944-b9a3-512abeb1892d","Type":"ContainerStarted","Data":"e02facce8bff273650dd52b4e6ba151b9fdf7fe49939d6f1e0fdc4247d6e97f2"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.612112 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" event={"ID":"4f8647a0-accb-4139-8720-290260f22bb0","Type":"ContainerStarted","Data":"364bd183b11a05d4452e28fe0cc773559e87d911dc9f8e58ccfecf04666b952a"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.612964 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.619321 4757 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-wzttx container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.619375 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" podUID="4f8647a0-accb-4139-8720-290260f22bb0" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Oct 06 
13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.620639 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2vt8x" event={"ID":"26023e16-6f2a-416b-8ccd-b1ee21bd0a57","Type":"ContainerStarted","Data":"f328090c78aba07c9ea6c17b3ab618b938513a0a3b95094fffa0118bb8c9b34c"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.620674 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-2vt8x" event={"ID":"26023e16-6f2a-416b-8ccd-b1ee21bd0a57","Type":"ContainerStarted","Data":"2a9aacf7873392e43957f5fb9eeac55bedffe1e70732a0004605d0e78c85261a"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.622795 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" event={"ID":"b9804fd5-588e-4638-b1ff-f815e7b5f834","Type":"ContainerStarted","Data":"6d672c41786b2ffeaddf158767358df22a70f039022d89e4a01676f45ce870ab"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.623586 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.633973 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rvzds" event={"ID":"c11f3757-611f-48f6-a4c4-909f4f45ccdf","Type":"ContainerStarted","Data":"7ae99c480d4fc95f50324f32f0fcb2b0823672c8fea4a9ef91168ceb9b8435cd"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.639050 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gmv42" podStartSLOduration=126.639035182 podStartE2EDuration="2m6.639035182s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.599737325 +0000 UTC m=+146.097055872" watchObservedRunningTime="2025-10-06 13:40:57.639035182 +0000 UTC m=+146.136353719" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.642749 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4" event={"ID":"e86ac358-5c66-416e-a42d-5429b2d57c86","Type":"ContainerStarted","Data":"4a5d2ce002405d4353ad30cf02ecf3a5c9dab89c82e98a529a69c889c572b689"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.643656 4757 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-f5469 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.24:8080/healthz\": dial tcp 10.217.0.24:8080: connect: connection refused" start-of-body= Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.643716 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" podUID="b9804fd5-588e-4638-b1ff-f815e7b5f834" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.24:8080/healthz\": dial tcp 10.217.0.24:8080: connect: connection refused" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.662849 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.663173 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" podStartSLOduration=125.663153861 podStartE2EDuration="2m5.663153861s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.640787277 +0000 UTC m=+146.138105834" watchObservedRunningTime="2025-10-06 13:40:57.663153861 +0000 UTC m=+146.160472398" Oct 06 13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.663205 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.163188823 +0000 UTC m=+146.660507360 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.663966 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" podStartSLOduration=125.66396268 podStartE2EDuration="2m5.66396268s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.662502977 +0000 UTC m=+146.159821514" watchObservedRunningTime="2025-10-06 13:40:57.66396268 +0000 UTC m=+146.161281217" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.674287 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" event={"ID":"66e3315c-fd06-44fa-9a91-9e2e814618c4","Type":"ContainerStarted","Data":"09797f8f5cfdb9409dd3b2c1575a981f209ed04dc86c3faceaed0d538667542f"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.684495 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" event={"ID":"0bf03ffd-be54-4071-ab97-013b5aa51a1b","Type":"ContainerStarted","Data":"fc174aa73736f2f3354581579c52dffec79c4f66eeb4b681c384fc552b41ead1"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.684546 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" event={"ID":"0bf03ffd-be54-4071-ab97-013b5aa51a1b","Type":"ContainerStarted","Data":"c8af6ef67cc0768ff79f327d03b0336320fdcb0dc61cfe5476a84b4bf769b83f"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.698426 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-2vt8x" podStartSLOduration=6.698395029 podStartE2EDuration="6.698395029s" podCreationTimestamp="2025-10-06 13:40:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-06 13:40:57.687518319 +0000 UTC m=+146.184836866" watchObservedRunningTime="2025-10-06 13:40:57.698395029 +0000 UTC m=+146.195713566" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.707738 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg" event={"ID":"90e799d2-898f-43e4-a937-940d2970d74e","Type":"ContainerStarted","Data":"648c574652121fd16ea354ca2bce6784735ecf6c32f733135799e64e05124dd7"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.726887 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8" event={"ID":"f3eed275-7acc-408b-8973-84d9c0fa817b","Type":"ContainerStarted","Data":"d4a822fe40738bbd6b87efa518a37b97d185c05b3606a4803729b7546357947d"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.730381 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd" event={"ID":"aa703301-7d55-4ee0-b118-61e91935f2de","Type":"ContainerStarted","Data":"e84c7f93e8b34b48346b9e7da67b274804857fe4c7af356109eb2723af5416c4"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.730515 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd" event={"ID":"aa703301-7d55-4ee0-b118-61e91935f2de","Type":"ContainerStarted","Data":"71f7029ae1c566d4894b32d930e99c6f0dbfa899548d7d65064a08819e4e4304"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.738062 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" podStartSLOduration=125.73804632 podStartE2EDuration="2m5.73804632s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.738005608 +0000 UTC m=+146.235324145" watchObservedRunningTime="2025-10-06 13:40:57.73804632 +0000 UTC m=+146.235364857" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.739083 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-qwn5v" podStartSLOduration=126.739077038 podStartE2EDuration="2m6.739077038s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.714669528 +0000 UTC m=+146.211988065" watchObservedRunningTime="2025-10-06 13:40:57.739077038 +0000 UTC m=+146.236395575" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.742905 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26" event={"ID":"2431e0a3-d977-4757-a8ed-7382ee29a08b","Type":"ContainerStarted","Data":"bdff64e1b5580c448c24d0a2248cfc9ae1f0bff43cf5352f208b7afd0d847896"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.764824 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" event={"ID":"8db12020-2fce-47b5-936e-e792b08976f0","Type":"ContainerStarted","Data":"dbd012bd338afbe873850b01652e418f44073d9581ac20aff8803307728d0617"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.765957 4757 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.766794 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.767442 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.267429183 +0000 UTC m=+146.764747720 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.770014 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-89rvd" podStartSLOduration=125.769989847 podStartE2EDuration="2m5.769989847s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.760181125 +0000 UTC m=+146.257499672" watchObservedRunningTime="2025-10-06 13:40:57.769989847 +0000 UTC m=+146.267308384" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.792020 4757 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-zcwgr container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused" start-of-body= Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.792080 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" podUID="8db12020-2fce-47b5-936e-e792b08976f0" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.794974 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-zt9z4" podStartSLOduration=125.794962217 podStartE2EDuration="2m5.794962217s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.792809057 +0000 UTC m=+146.290127614" watchObservedRunningTime="2025-10-06 13:40:57.794962217 +0000 UTC m=+146.292280754" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.796768 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5" 
event={"ID":"b5167c6e-fb82-4a79-a743-aed0c32e8f9a","Type":"ContainerStarted","Data":"b3934ab65eeb4f8de4433e1569f1b61661342bc960b93b2564b17771245e1de8"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.797255 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5" event={"ID":"b5167c6e-fb82-4a79-a743-aed0c32e8f9a","Type":"ContainerStarted","Data":"230faa5b3285a97065efcc586eb80dbca42424233f66ecda01ceb9c9413d364c"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.838401 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b" event={"ID":"d4bf9975-f58d-4419-864d-88b28d436c56","Type":"ContainerStarted","Data":"5777648a807acb59dd2e0cd28df46d41b4e96af880509e69a63d5c4f3f722a53"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.839086 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b" event={"ID":"d4bf9975-f58d-4419-864d-88b28d436c56","Type":"ContainerStarted","Data":"8e8fe3a4065fa61584311c09d8da710f1f1894a8ff7051f470c42eb49f209b2d"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.841289 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" event={"ID":"0fc6af18-7b97-413d-98f6-df292aaf7e49","Type":"ContainerStarted","Data":"78553827cb9bd0d554ff8e8a1c532f6ace72f8107fa9bf0b1ac2bd06b11d8a48"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.841335 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" event={"ID":"0fc6af18-7b97-413d-98f6-df292aaf7e49","Type":"ContainerStarted","Data":"11beee137cac203b4c6da04feea5cec4d1d4ae73608657606f5f09610d064fdf"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.842146 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.846730 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nm7z8" podStartSLOduration=125.846718413 podStartE2EDuration="2m5.846718413s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.845705756 +0000 UTC m=+146.343024303" watchObservedRunningTime="2025-10-06 13:40:57.846718413 +0000 UTC m=+146.344036950" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.848582 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" podStartSLOduration=125.848573962 podStartE2EDuration="2m5.848573962s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.823238249 +0000 UTC m=+146.320556786" watchObservedRunningTime="2025-10-06 13:40:57.848573962 +0000 UTC m=+146.345892489" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.849568 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" 
event={"ID":"51673d40-dc08-42c2-87ee-0256c42c66df","Type":"ContainerStarted","Data":"3a5c35ed8810fec94b5de3b5a585b2ca73df4e14e2da1f9cdf30d7a4f21671e6"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.849599 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" event={"ID":"51673d40-dc08-42c2-87ee-0256c42c66df","Type":"ContainerStarted","Data":"020a6690b87a5340c56477ab8d713be71860971ee7aad4362fd72c17a27517f4"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.850376 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.852356 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gklxq" event={"ID":"8c0d7122-aeb4-4292-a8fc-61cee9bf303c","Type":"ContainerStarted","Data":"d1dd9fcc2aa6b22c0fa5773575a764945c037a39fd212402fde9b6543be4bdbf"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.854040 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k" event={"ID":"10a2c372-b600-4c78-a4d5-22f5f8c1e425","Type":"ContainerStarted","Data":"49791c73d0ad700e02d872467c53c27e1992c37615f81579977a60d92bc143c9"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.855426 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-wdjlv" event={"ID":"65b9c584-73e5-4414-bc72-556aabda7064","Type":"ContainerStarted","Data":"85ba377f1f5024cf5119e731f946d1d63b740c959a2df845025a6a7640851409"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.855447 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-wdjlv" event={"ID":"65b9c584-73e5-4414-bc72-556aabda7064","Type":"ContainerStarted","Data":"e901d10308dc98783b65404b1eaf890ccfa697f58cbe1d660664436e4cad4160"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.856223 4757 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-sbbvc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" start-of-body= Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.856258 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" podUID="0fc6af18-7b97-413d-98f6-df292aaf7e49" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.856541 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx" event={"ID":"57071541-5943-4293-ba92-c9fa3dc6ec00","Type":"ContainerStarted","Data":"a073e1bc9582db3009cd67f4dde6ea65eb967ae49e159d169ce52eb66e4dd3ca"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.857404 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j" event={"ID":"e5d9c90b-be15-4ac9-b19c-6d69db82e58f","Type":"ContainerStarted","Data":"f5d5a3eb15bd9a24290d9483b615d0f17434eeff7db082623e0e1df7b355067f"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.863105 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" event={"ID":"7724900e-8239-400e-92a8-686e0c85f223","Type":"ContainerStarted","Data":"179399057ee944d7f51b7ed23c4fc6c87bd053f292444ed76b2182a6bcc74a70"} Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.863381 4757 patch_prober.go:28] interesting pod/downloads-7954f5f757-kpprd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.863409 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-kpprd" podUID="2ddf9932-df5d-40b6-88f8-ced01d618903" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.867872 4757 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-rdbhp container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.867932 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" podUID="51673d40-dc08-42c2-87ee-0256c42c66df" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.869076 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.870332 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.370314693 +0000 UTC m=+146.867633220 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.870511 4757 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-np4zr container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.870551 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" podUID="74caa6de-7695-4dcb-9daf-f3368905de1c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.930533 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-fggk5" podStartSLOduration=125.93051119 podStartE2EDuration="2m5.93051119s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.913566565 +0000 UTC m=+146.410885122" watchObservedRunningTime="2025-10-06 13:40:57.93051119 +0000 UTC m=+146.427829717" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.937301 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-fcpcg" podStartSLOduration=125.937280049 podStartE2EDuration="2m5.937280049s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.868315488 +0000 UTC m=+146.365634045" watchObservedRunningTime="2025-10-06 13:40:57.937280049 +0000 UTC m=+146.434598576" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.961515 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" podStartSLOduration=125.961500201 podStartE2EDuration="2m5.961500201s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.96145512 +0000 UTC m=+146.458773657" watchObservedRunningTime="2025-10-06 13:40:57.961500201 +0000 UTC m=+146.458818738" Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.970833 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 
13:40:57 crc kubenswrapper[4757]: E1006 13:40:57.972952 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.472935063 +0000 UTC m=+146.970253690 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:57 crc kubenswrapper[4757]: I1006 13:40:57.984080 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-75h8j" podStartSLOduration=125.984064513 podStartE2EDuration="2m5.984064513s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:57.982445743 +0000 UTC m=+146.479764280" watchObservedRunningTime="2025-10-06 13:40:57.984064513 +0000 UTC m=+146.481383050" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.026011 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" podStartSLOduration=127.025995527 podStartE2EDuration="2m7.025995527s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.024026635 +0000 UTC m=+146.521345192" watchObservedRunningTime="2025-10-06 13:40:58.025995527 +0000 UTC m=+146.523314064" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.072842 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.073292 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.573278009 +0000 UTC m=+147.070596546 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.096521 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jmp6b" podStartSLOduration=127.096503035 podStartE2EDuration="2m7.096503035s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.090115679 +0000 UTC m=+146.587434226" watchObservedRunningTime="2025-10-06 13:40:58.096503035 +0000 UTC m=+146.593821572" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.174719 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.175191 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.675170263 +0000 UTC m=+147.172488880 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.209967 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-gklxq" podStartSLOduration=126.209935703 podStartE2EDuration="2m6.209935703s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.138806893 +0000 UTC m=+146.636125420" watchObservedRunningTime="2025-10-06 13:40:58.209935703 +0000 UTC m=+146.707254240" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.211520 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-sms26" podStartSLOduration=126.211510271 podStartE2EDuration="2m6.211510271s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.211294423 +0000 UTC m=+146.708612960" watchObservedRunningTime="2025-10-06 13:40:58.211510271 +0000 UTC m=+146.708828808" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.273722 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" podStartSLOduration=127.273700483 podStartE2EDuration="2m7.273700483s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.271159959 +0000 UTC m=+146.768478516" watchObservedRunningTime="2025-10-06 13:40:58.273700483 +0000 UTC m=+146.771019020" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.275712 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.275891 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.775870932 +0000 UTC m=+147.273189469 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.276569 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.277034 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.777012775 +0000 UTC m=+147.274331312 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.292168 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-bg97k" podStartSLOduration=126.292146672 podStartE2EDuration="2m6.292146672s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.288497627 +0000 UTC m=+146.785816164" watchObservedRunningTime="2025-10-06 13:40:58.292146672 +0000 UTC m=+146.789465219" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.311964 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" podStartSLOduration=126.311942711 podStartE2EDuration="2m6.311942711s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.308847607 +0000 UTC m=+146.806166144" watchObservedRunningTime="2025-10-06 13:40:58.311942711 +0000 UTC m=+146.809261248" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.337525 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.337615 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.339807 4757 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-bgct2 container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get 
\"https://10.217.0.12:8443/livez\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.340038 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" podUID="66e3315c-fd06-44fa-9a91-9e2e814618c4" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.12:8443/livez\": dial tcp 10.217.0.12:8443: connect: connection refused" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.378055 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.378246 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.878222422 +0000 UTC m=+147.375540959 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.378475 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.378755 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.878747922 +0000 UTC m=+147.376066449 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.479575 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.479764 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.979737892 +0000 UTC m=+147.477056429 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.480246 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.480560 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:58.980550763 +0000 UTC m=+147.477869300 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.530483 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:40:58 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:40:58 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:40:58 crc kubenswrapper[4757]: healthz check failed Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.531014 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.581732 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.582281 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.082262569 +0000 UTC m=+147.579581116 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.683969 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.684279 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.184268717 +0000 UTC m=+147.681587244 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.785156 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.785356 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.28533122 +0000 UTC m=+147.782649757 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.785450 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.785790 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.285769486 +0000 UTC m=+147.783088023 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.868612 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5" event={"ID":"b5167c6e-fb82-4a79-a743-aed0c32e8f9a","Type":"ContainerStarted","Data":"cff6918597c63557c0f0c7fa13b1dc05ee8b0165e9775081703f36cf5bca9986"} Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.869692 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.871943 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-wdjlv" event={"ID":"65b9c584-73e5-4414-bc72-556aabda7064","Type":"ContainerStarted","Data":"bde51b59266293b264c3f7aba6d0023eceb037b7e40dcd89c2f8d0db9f4fd8e9"} Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.872166 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-wdjlv" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.874242 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx" event={"ID":"57071541-5943-4293-ba92-c9fa3dc6ec00","Type":"ContainerStarted","Data":"2601f61bd52aa9fefb2c268fef2c93410607b47d935c4f17ce9df6fbbba0cc85"} Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.875022 4757 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-wzttx container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.875027 4757 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-rdbhp container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.875125 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx" podUID="4f8647a0-accb-4139-8720-290260f22bb0" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.875158 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" podUID="51673d40-dc08-42c2-87ee-0256c42c66df" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.875025 4757 patch_prober.go:28] interesting pod/downloads-7954f5f757-kpprd container/download-server namespace/openshift-console: 
Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.875240 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-kpprd" podUID="2ddf9932-df5d-40b6-88f8-ced01d618903" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.876787 4757 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-sbbvc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" start-of-body= Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.876838 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" podUID="0fc6af18-7b97-413d-98f6-df292aaf7e49" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.876800 4757 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-zcwgr container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused" start-of-body= Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.876890 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" podUID="8db12020-2fce-47b5-936e-e792b08976f0" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.876798 4757 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-f5469 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.24:8080/healthz\": dial tcp 10.217.0.24:8080: connect: connection refused" start-of-body= Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.876964 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" podUID="b9804fd5-588e-4638-b1ff-f815e7b5f834" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.24:8080/healthz\": dial tcp 10.217.0.24:8080: connect: connection refused" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.886600 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.886866 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.386820209 +0000 UTC m=+147.884138766 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.887008 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.887325 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.387313207 +0000 UTC m=+147.884631744 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.898272 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5" podStartSLOduration=126.8982404 podStartE2EDuration="2m6.8982404s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.895028541 +0000 UTC m=+147.392347088" watchObservedRunningTime="2025-10-06 13:40:58.8982404 +0000 UTC m=+147.395558937" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.940787 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-wdjlv" podStartSLOduration=7.940768565 podStartE2EDuration="7.940768565s" podCreationTimestamp="2025-10-06 13:40:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.920601403 +0000 UTC m=+147.417919940" watchObservedRunningTime="2025-10-06 13:40:58.940768565 +0000 UTC m=+147.438087102" Oct 06 13:40:58 crc kubenswrapper[4757]: I1006 13:40:58.988409 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:58 crc kubenswrapper[4757]: E1006 13:40:58.990029 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" 
failed. No retries permitted until 2025-10-06 13:40:59.489995639 +0000 UTC m=+147.987314186 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.090992 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.091432 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.591411966 +0000 UTC m=+148.088730593 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.193035 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.193229 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.693203265 +0000 UTC m=+148.190521802 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.193626 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.193967 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.693959373 +0000 UTC m=+148.191277910 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.296065 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.796036194 +0000 UTC m=+148.293354771 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.295887 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.297806 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.797780657 +0000 UTC m=+148.295099234 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.297891 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.364965 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.389514 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-tddsx" podStartSLOduration=127.389494036 podStartE2EDuration="2m7.389494036s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:40:58.942010052 +0000 UTC m=+147.439328589" watchObservedRunningTime="2025-10-06 13:40:59.389494036 +0000 UTC m=+147.886812583" Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.399296 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.399723 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:40:59.899691231 +0000 UTC m=+148.397009768 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.500771 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.501055 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.001043335 +0000 UTC m=+148.498361872 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.531888 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 06 13:40:59 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld
Oct 06 13:40:59 crc kubenswrapper[4757]: [+]process-running ok
Oct 06 13:40:59 crc kubenswrapper[4757]: healthz check failed
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.532545 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.576383 4757 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-qg4qm container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.576383 4757 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-qg4qm container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.576513 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" podUID="03f79109-aa85-47fe-9ebb-f14f313aa7f6" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused"
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.576468 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" podUID="03f79109-aa85-47fe-9ebb-f14f313aa7f6" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused"
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.602379 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.602500 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.102481822 +0000 UTC m=+148.599800359 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.602588 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.602856 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.102847186 +0000 UTC m=+148.600165723 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.703589 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.703744 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.203722311 +0000 UTC m=+148.701040848 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.704361 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.704778 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.20474719 +0000 UTC m=+148.702065727 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.805326 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.805507 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.30546905 +0000 UTC m=+148.802787587 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.805634 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.805939 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.305925256 +0000 UTC m=+148.803243783 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.882054 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rvzds" event={"ID":"c11f3757-611f-48f6-a4c4-909f4f45ccdf","Type":"ContainerStarted","Data":"7004811d539fbff377d6d131573cbff01110855b75d2f0b2705a00acff64cac3"}
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.884713 4757 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-f5469 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.24:8080/healthz\": dial tcp 10.217.0.24:8080: connect: connection refused" start-of-body=
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.884771 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" podUID="b9804fd5-588e-4638-b1ff-f815e7b5f834" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.24:8080/healthz\": dial tcp 10.217.0.24:8080: connect: connection refused"
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.885591 4757 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-rdbhp container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body=
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.885641 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" podUID="51673d40-dc08-42c2-87ee-0256c42c66df" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused"
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.891651 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-wzttx"
Oct 06 13:40:59 crc kubenswrapper[4757]: I1006 13:40:59.907363 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:40:59 crc kubenswrapper[4757]: E1006 13:40:59.907759 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.407744947 +0000 UTC m=+148.905063484 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.009038 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.010727 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.51070757 +0000 UTC m=+149.008026217 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.110887 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.111242 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.111281 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.112148 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.612118566 +0000 UTC m=+149.109437103 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.112777 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.124142 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.207121 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.212859 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.213056 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.213242 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.213400 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.713384106 +0000 UTC m=+149.210702643 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.216761 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.222873 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.299275 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.314545 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.314950 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.814928557 +0000 UTC m=+149.312247094 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.347450 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.348141 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.364408 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.373225 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.379483 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.412378 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.416797 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.416859 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.416918 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.417212 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:00.917201484 +0000 UTC m=+149.414520021 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.497154 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.508931 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.517855 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.518221 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.518318 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.518658 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.018640361 +0000 UTC m=+149.515958908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.518694 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.541320 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 06 13:41:00 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld
Oct 06 13:41:00 crc kubenswrapper[4757]: [+]process-running ok
Oct 06 13:41:00 crc kubenswrapper[4757]: healthz check failed
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.541374 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.619740 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.620497 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.120478742 +0000 UTC m=+149.617797279 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.620922 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.692443 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.722669 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.723466 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.223449706 +0000 UTC m=+149.720768243 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.825718 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.826105 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.326066166 +0000 UTC m=+149.823384703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.887911 4757 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-sbbvc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.887973 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" podUID="0fc6af18-7b97-413d-98f6-df292aaf7e49" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Oct 06 13:41:00 crc kubenswrapper[4757]: I1006 13:41:00.926702 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:00 crc kubenswrapper[4757]: E1006 13:41:00.927038 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.427023605 +0000 UTC m=+149.924342132 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.032551 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.034211 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.534194164 +0000 UTC m=+150.031512701 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.136175 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.136547 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.636527173 +0000 UTC m=+150.133845710 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.238432 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.238889 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.738878544 +0000 UTC m=+150.236197081 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: W1006 13:41:01.244249 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-04ef55e24ef1d9c950367a5dc8215d5159d6f38d1333ae141f9f7da07dcdbeb1 WatchSource:0}: Error finding container 04ef55e24ef1d9c950367a5dc8215d5159d6f38d1333ae141f9f7da07dcdbeb1: Status 404 returned error can't find the container with id 04ef55e24ef1d9c950367a5dc8215d5159d6f38d1333ae141f9f7da07dcdbeb1
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.339506 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.340112 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.840069371 +0000 UTC m=+150.337387908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.441142 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.441469 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:01.941454886 +0000 UTC m=+150.438773423 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.532424 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 06 13:41:01 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld
Oct 06 13:41:01 crc kubenswrapper[4757]: [+]process-running ok
Oct 06 13:41:01 crc kubenswrapper[4757]: healthz check failed
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.532499 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.542321 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.542497 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.042463387 +0000 UTC m=+150.539781924 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.542580 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.542939 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.042929794 +0000 UTC m=+150.540248331 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.572876 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Oct 06 13:41:01 crc kubenswrapper[4757]: W1006 13:41:01.584303 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod02c5c000_37ed_4b65_bfbc_8cef2e245fe3.slice/crio-81db6b01b19e668d9efd200a8902b05789cc4490af847b7ee7ca344c276e5ed9 WatchSource:0}: Error finding container 81db6b01b19e668d9efd200a8902b05789cc4490af847b7ee7ca344c276e5ed9: Status 404 returned error can't find the container with id 81db6b01b19e668d9efd200a8902b05789cc4490af847b7ee7ca344c276e5ed9
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.644701 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.645352 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.145315585 +0000 UTC m=+150.642634122 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.749952 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.750346 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.250332514 +0000 UTC m=+150.747651051 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.851680 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.851904 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.351874665 +0000 UTC m=+150.849193192 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.852018 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.852448 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.352434725 +0000 UTC m=+150.849753262 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.901995 4757 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.906488 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"534274eb7a316b6130c3164e9ee9da268688f9fb2379043459abf7a85a293615"}
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.906529 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2c2f5810fe6922ea36b0b411618c7e4f9053daf2bb34ab30fdd5d33d43989f1f"}
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.909388 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"747b028e2f4429d3134bc5d46edd4bdf3b8f8d4f586642388cf0ff9d72da39d5"}
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.909419 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"04ef55e24ef1d9c950367a5dc8215d5159d6f38d1333ae141f9f7da07dcdbeb1"}
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.909603 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.911748 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"61bf91413111a08d76b0c1a14b24ab9c398e40acfca79b4df6aeeaca48a3c230"}
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.911793 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e3f3e156ec9f86d60bdc09e9cf290d9be20ef610fa21447dad40a197a83360b2"}
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.915451 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rvzds" event={"ID":"c11f3757-611f-48f6-a4c4-909f4f45ccdf","Type":"ContainerStarted","Data":"8af4cf0f6e583940096647a199fce9f80a527c3091e097f2db9be45af5d5e7f9"}
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.917927 4757 generic.go:334] "Generic (PLEG): container finished" podID="7b93821f-dfad-44a5-a217-eb63987c1f0a" containerID="c9cdccf51ed3ffabdec5ba8c55257e4cd788195638213e89980e5436c5f7d857" exitCode=0
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.918005 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" event={"ID":"7b93821f-dfad-44a5-a217-eb63987c1f0a","Type":"ContainerDied","Data":"c9cdccf51ed3ffabdec5ba8c55257e4cd788195638213e89980e5436c5f7d857"}
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.919041 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"02c5c000-37ed-4b65-bfbc-8cef2e245fe3","Type":"ContainerStarted","Data":"81db6b01b19e668d9efd200a8902b05789cc4490af847b7ee7ca344c276e5ed9"}
Oct 06 13:41:01 crc kubenswrapper[4757]: I1006 13:41:01.953186 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:01 crc kubenswrapper[4757]: E1006 13:41:01.953519 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.453502838 +0000 UTC m=+150.950821385 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.001362 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bdfrj"]
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.002440 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.009695 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.012672 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bdfrj"]
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.054665 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpl7n\" (UniqueName: \"kubernetes.io/projected/ab36484b-2956-4a35-8379-6b1fc3ffca49-kube-api-access-fpl7n\") pod \"community-operators-bdfrj\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.054711 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-utilities\") pod \"community-operators-bdfrj\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.054739 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-catalog-content\") pod \"community-operators-bdfrj\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.054884 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:02 crc kubenswrapper[4757]: E1006 13:41:02.055246 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.555233676 +0000 UTC m=+151.052552213 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.156447 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.156641 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpl7n\" (UniqueName: \"kubernetes.io/projected/ab36484b-2956-4a35-8379-6b1fc3ffca49-kube-api-access-fpl7n\") pod \"community-operators-bdfrj\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: E1006 13:41:02.156690 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.656636562 +0000 UTC m=+151.153955099 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.156766 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-utilities\") pod \"community-operators-bdfrj\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.156872 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-catalog-content\") pod \"community-operators-bdfrj\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.157483 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-utilities\") pod \"community-operators-bdfrj\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.157596 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-catalog-content\") pod \"community-operators-bdfrj\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.175842 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpl7n\" (UniqueName: \"kubernetes.io/projected/ab36484b-2956-4a35-8379-6b1fc3ffca49-kube-api-access-fpl7n\") pod \"community-operators-bdfrj\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.188798 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h5mpv"]
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.190063 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h5mpv"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.194818 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.205560 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h5mpv"]
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.258030 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-utilities\") pod \"certified-operators-h5mpv\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " pod="openshift-marketplace/certified-operators-h5mpv"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.258130 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.258175 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktf9k\" (UniqueName: \"kubernetes.io/projected/901fbe7e-0856-4210-b723-30952e1fe25d-kube-api-access-ktf9k\") pod \"certified-operators-h5mpv\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " pod="openshift-marketplace/certified-operators-h5mpv"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.258201 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-catalog-content\") pod \"certified-operators-h5mpv\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " pod="openshift-marketplace/certified-operators-h5mpv"
Oct 06 13:41:02 crc kubenswrapper[4757]: E1006 13:41:02.272786 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.772767689 +0000 UTC m=+151.270086226 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.340397 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdfrj"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.361730 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 06 13:41:02 crc kubenswrapper[4757]: E1006 13:41:02.361845 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.86182445 +0000 UTC m=+151.359142987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.362203 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-utilities\") pod \"certified-operators-h5mpv\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " pod="openshift-marketplace/certified-operators-h5mpv"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.362256 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.362284 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktf9k\" (UniqueName: \"kubernetes.io/projected/901fbe7e-0856-4210-b723-30952e1fe25d-kube-api-access-ktf9k\") pod \"certified-operators-h5mpv\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " pod="openshift-marketplace/certified-operators-h5mpv"
Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.362312 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-catalog-content\") pod \"certified-operators-h5mpv\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " pod="openshift-marketplace/certified-operators-h5mpv"
Oct 06 13:41:02 crc
kubenswrapper[4757]: E1006 13:41:02.362634 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-06 13:41:02.862615979 +0000 UTC m=+151.359934516 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-99wzw" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.362776 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-utilities\") pod \"certified-operators-h5mpv\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.362805 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-catalog-content\") pod \"certified-operators-h5mpv\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.371116 4757 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-06T13:41:01.902286232Z","Handler":null,"Name":""} Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.376437 4757 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.376492 4757 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.394152 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jgt5d"] Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.396953 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktf9k\" (UniqueName: \"kubernetes.io/projected/901fbe7e-0856-4210-b723-30952e1fe25d-kube-api-access-ktf9k\") pod \"certified-operators-h5mpv\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.397155 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.403758 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jgt5d"] Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.449328 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.449445 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.455509 4757 patch_prober.go:28] interesting pod/apiserver-76f77b778f-hm8qr container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]log ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]etcd ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/generic-apiserver-start-informers ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/max-in-flight-filter ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/image.openshift.io-apiserver-caches ok Oct 06 13:41:02 crc kubenswrapper[4757]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Oct 06 13:41:02 crc kubenswrapper[4757]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/project.openshift.io-projectcache ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/openshift.io-startinformers ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/openshift.io-restmapperupdater ok Oct 06 13:41:02 crc kubenswrapper[4757]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 06 13:41:02 crc kubenswrapper[4757]: livez check failed Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.455568 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" podUID="7724900e-8239-400e-92a8-686e0c85f223" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.463620 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.463885 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-catalog-content\") pod \"community-operators-jgt5d\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.463915 4757 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-utilities\") pod \"community-operators-jgt5d\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.463969 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-896n4\" (UniqueName: \"kubernetes.io/projected/69959a98-24ab-49d0-b774-a98f5ebe70e3-kube-api-access-896n4\") pod \"community-operators-jgt5d\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.469545 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.513569 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.530624 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:02 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:02 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:02 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.530685 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.566042 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-catalog-content\") pod \"community-operators-jgt5d\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.566111 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-utilities\") pod \"community-operators-jgt5d\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.566154 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 
13:41:02.566203 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-896n4\" (UniqueName: \"kubernetes.io/projected/69959a98-24ab-49d0-b774-a98f5ebe70e3-kube-api-access-896n4\") pod \"community-operators-jgt5d\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.568199 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-catalog-content\") pod \"community-operators-jgt5d\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.568294 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-utilities\") pod \"community-operators-jgt5d\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.582121 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.582171 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.583935 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qg4qm" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.595328 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-896n4\" (UniqueName: \"kubernetes.io/projected/69959a98-24ab-49d0-b774-a98f5ebe70e3-kube-api-access-896n4\") pod \"community-operators-jgt5d\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.600703 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-d8wfp"] Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.601622 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.612001 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d8wfp"] Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.626999 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bdfrj"] Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.641969 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-99wzw\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.656144 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:41:02 crc kubenswrapper[4757]: W1006 13:41:02.664914 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab36484b_2956_4a35_8379_6b1fc3ffca49.slice/crio-1b0e350f3bf923bfde15d553339b8e79de047f7549ac54d742d31453d7329bc5 WatchSource:0}: Error finding container 1b0e350f3bf923bfde15d553339b8e79de047f7549ac54d742d31453d7329bc5: Status 404 returned error can't find the container with id 1b0e350f3bf923bfde15d553339b8e79de047f7549ac54d742d31453d7329bc5 Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.668224 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-catalog-content\") pod \"certified-operators-d8wfp\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") " pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.668312 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-utilities\") pod \"certified-operators-d8wfp\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") " pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.668395 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vpxh\" (UniqueName: \"kubernetes.io/projected/f9a9e7aa-5ab4-4877-b808-1097b5c27155-kube-api-access-2vpxh\") pod \"certified-operators-d8wfp\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") " pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.735500 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.756379 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h5mpv"] Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.771541 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-catalog-content\") pod \"certified-operators-d8wfp\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") " pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.771590 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-utilities\") pod \"certified-operators-d8wfp\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") " pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.771633 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vpxh\" (UniqueName: \"kubernetes.io/projected/f9a9e7aa-5ab4-4877-b808-1097b5c27155-kube-api-access-2vpxh\") pod \"certified-operators-d8wfp\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") " pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.772375 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-utilities\") pod \"certified-operators-d8wfp\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") " pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.772731 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-catalog-content\") pod \"certified-operators-d8wfp\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") " pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: W1006 13:41:02.774962 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod901fbe7e_0856_4210_b723_30952e1fe25d.slice/crio-d4a20230271817d5cd81614abc4eaba4391d6c4f9f924ccda4d0b1a7cef6aa80 WatchSource:0}: Error finding container d4a20230271817d5cd81614abc4eaba4391d6c4f9f924ccda4d0b1a7cef6aa80: Status 404 returned error can't find the container with id d4a20230271817d5cd81614abc4eaba4391d6c4f9f924ccda4d0b1a7cef6aa80 Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.794463 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vpxh\" (UniqueName: \"kubernetes.io/projected/f9a9e7aa-5ab4-4877-b808-1097b5c27155-kube-api-access-2vpxh\") pod \"certified-operators-d8wfp\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") " pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.913513 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-99wzw"] Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.935546 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5mpv" 
event={"ID":"901fbe7e-0856-4210-b723-30952e1fe25d","Type":"ContainerStarted","Data":"af7a4474fb44ba23925187c4320bbf38aff8eb46dc7be1ee2f764ee24cffc76e"} Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.935588 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5mpv" event={"ID":"901fbe7e-0856-4210-b723-30952e1fe25d","Type":"ContainerStarted","Data":"d4a20230271817d5cd81614abc4eaba4391d6c4f9f924ccda4d0b1a7cef6aa80"} Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.962230 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rvzds" event={"ID":"c11f3757-611f-48f6-a4c4-909f4f45ccdf","Type":"ContainerStarted","Data":"f02c98920adef6d3b43be804054c826bbd9861392316fbcef29d5cb59694ea88"} Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.962279 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rvzds" event={"ID":"c11f3757-611f-48f6-a4c4-909f4f45ccdf","Type":"ContainerStarted","Data":"2fadfc25b64e29e8506ff85cbacbf02ee8594a8a3440e5eb1e71e32f10e98dca"} Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.963133 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.967355 4757 generic.go:334] "Generic (PLEG): container finished" podID="02c5c000-37ed-4b65-bfbc-8cef2e245fe3" containerID="bf1e0c108e3aa2f8c9a246bba96a282bc6a5d90ef44dee813c9bdaa5f08099c3" exitCode=0 Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.967497 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"02c5c000-37ed-4b65-bfbc-8cef2e245fe3","Type":"ContainerDied","Data":"bf1e0c108e3aa2f8c9a246bba96a282bc6a5d90ef44dee813c9bdaa5f08099c3"} Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.978338 4757 generic.go:334] "Generic (PLEG): container finished" podID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerID="edad05f0c30cde4046294ceeae626b7c2072ffcbc83d031bdc5e0e92610ad0ec" exitCode=0 Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.978454 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdfrj" event={"ID":"ab36484b-2956-4a35-8379-6b1fc3ffca49","Type":"ContainerDied","Data":"edad05f0c30cde4046294ceeae626b7c2072ffcbc83d031bdc5e0e92610ad0ec"} Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.978502 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdfrj" event={"ID":"ab36484b-2956-4a35-8379-6b1fc3ffca49","Type":"ContainerStarted","Data":"1b0e350f3bf923bfde15d553339b8e79de047f7549ac54d742d31453d7329bc5"} Oct 06 13:41:02 crc kubenswrapper[4757]: I1006 13:41:02.989206 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.001623 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-rvzds" podStartSLOduration=12.001602989 podStartE2EDuration="12.001602989s" podCreationTimestamp="2025-10-06 13:40:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:41:02.996609255 +0000 UTC m=+151.493927802" watchObservedRunningTime="2025-10-06 13:41:03.001602989 +0000 UTC m=+151.498921526" Oct 06 
13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.162152 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jgt5d"] Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.174899 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.175728 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.179776 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 06 13:41:03 crc kubenswrapper[4757]: W1006 13:41:03.180129 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69959a98_24ab_49d0_b774_a98f5ebe70e3.slice/crio-1783e0312f251ef23d10189019424113b7e25f74be77f2a8a05fa365acb3cc6f WatchSource:0}: Error finding container 1783e0312f251ef23d10189019424113b7e25f74be77f2a8a05fa365acb3cc6f: Status 404 returned error can't find the container with id 1783e0312f251ef23d10189019424113b7e25f74be77f2a8a05fa365acb3cc6f Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.180214 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.203551 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.267379 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-d2rg2" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.281345 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.281410 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.281498 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.281532 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.287585 4757 patch_prober.go:28] interesting pod/console-f9d7485db-xbn64 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.287641 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xbn64" podUID="0345b748-8161-40b5-bec8-0c36c2d87ea3" containerName="console" 
probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.303361 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.359103 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d8wfp"] Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.361026 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:41:03 crc kubenswrapper[4757]: W1006 13:41:03.380540 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9a9e7aa_5ab4_4877_b808_1097b5c27155.slice/crio-d577142a1cec83551904a69309a0f9a4d5c5e36559d588b4cc957b5624d6951e WatchSource:0}: Error finding container d577142a1cec83551904a69309a0f9a4d5c5e36559d588b4cc957b5624d6951e: Status 404 returned error can't find the container with id d577142a1cec83551904a69309a0f9a4d5c5e36559d588b4cc957b5624d6951e Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.381899 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b93821f-dfad-44a5-a217-eb63987c1f0a-secret-volume\") pod \"7b93821f-dfad-44a5-a217-eb63987c1f0a\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.381931 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq2dh\" (UniqueName: \"kubernetes.io/projected/7b93821f-dfad-44a5-a217-eb63987c1f0a-kube-api-access-fq2dh\") pod \"7b93821f-dfad-44a5-a217-eb63987c1f0a\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.382064 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b93821f-dfad-44a5-a217-eb63987c1f0a-config-volume\") pod \"7b93821f-dfad-44a5-a217-eb63987c1f0a\" (UID: \"7b93821f-dfad-44a5-a217-eb63987c1f0a\") " Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.382243 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.382373 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.383245 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-bgct2" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.387542 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kubelet-dir\") pod 
\"revision-pruner-8-crc\" (UID: \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.388438 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b93821f-dfad-44a5-a217-eb63987c1f0a-config-volume" (OuterVolumeSpecName: "config-volume") pod "7b93821f-dfad-44a5-a217-eb63987c1f0a" (UID: "7b93821f-dfad-44a5-a217-eb63987c1f0a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.389028 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b93821f-dfad-44a5-a217-eb63987c1f0a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7b93821f-dfad-44a5-a217-eb63987c1f0a" (UID: "7b93821f-dfad-44a5-a217-eb63987c1f0a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.389669 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b93821f-dfad-44a5-a217-eb63987c1f0a-kube-api-access-fq2dh" (OuterVolumeSpecName: "kube-api-access-fq2dh") pod "7b93821f-dfad-44a5-a217-eb63987c1f0a" (UID: "7b93821f-dfad-44a5-a217-eb63987c1f0a"). InnerVolumeSpecName "kube-api-access-fq2dh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.413738 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.484332 4757 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b93821f-dfad-44a5-a217-eb63987c1f0a-config-volume\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.484374 4757 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b93821f-dfad-44a5-a217-eb63987c1f0a-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.484486 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq2dh\" (UniqueName: \"kubernetes.io/projected/7b93821f-dfad-44a5-a217-eb63987c1f0a-kube-api-access-fq2dh\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.514437 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.527422 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.534539 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:03 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:03 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:03 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.534582 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.710264 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 06 13:41:03 crc kubenswrapper[4757]: W1006 13:41:03.718809 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod77dba3de_14f0_4ae5_8fe3_063537d1c5a1.slice/crio-c08c8d4b6cee2966248c67bac1a98746f705e07b8faa75519303c8efc3f4c2e4 WatchSource:0}: Error finding container c08c8d4b6cee2966248c67bac1a98746f705e07b8faa75519303c8efc3f4c2e4: Status 404 returned error can't find the container with id c08c8d4b6cee2966248c67bac1a98746f705e07b8faa75519303c8efc3f4c2e4 Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.984845 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"77dba3de-14f0-4ae5-8fe3-063537d1c5a1","Type":"ContainerStarted","Data":"c08c8d4b6cee2966248c67bac1a98746f705e07b8faa75519303c8efc3f4c2e4"} Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.988238 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" event={"ID":"7b93821f-dfad-44a5-a217-eb63987c1f0a","Type":"ContainerDied","Data":"6d2521b2e9923c3b41dc9454fc3f0355f688fc85c011c6dd6614362cc7cc3031"} Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.988291 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d2521b2e9923c3b41dc9454fc3f0355f688fc85c011c6dd6614362cc7cc3031" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.988347 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.990327 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" event={"ID":"a7126d08-f833-4acc-b4f4-e7d4d88b00ca","Type":"ContainerStarted","Data":"a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0"} Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.990391 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" event={"ID":"a7126d08-f833-4acc-b4f4-e7d4d88b00ca","Type":"ContainerStarted","Data":"74523d52d1ec252089eb5972f8460dbc649b6154651d7db727eebf7fc18fea87"} Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.990620 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.993979 4757 generic.go:334] "Generic (PLEG): container finished" podID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerID="5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501" exitCode=0 Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.994211 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8wfp" event={"ID":"f9a9e7aa-5ab4-4877-b808-1097b5c27155","Type":"ContainerDied","Data":"5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501"} Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.994262 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8wfp" event={"ID":"f9a9e7aa-5ab4-4877-b808-1097b5c27155","Type":"ContainerStarted","Data":"d577142a1cec83551904a69309a0f9a4d5c5e36559d588b4cc957b5624d6951e"} Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.996822 4757 generic.go:334] "Generic (PLEG): container finished" podID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerID="abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a" exitCode=0 Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.996935 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jgt5d" event={"ID":"69959a98-24ab-49d0-b774-a98f5ebe70e3","Type":"ContainerDied","Data":"abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a"} Oct 06 13:41:03 crc kubenswrapper[4757]: I1006 13:41:03.997009 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jgt5d" event={"ID":"69959a98-24ab-49d0-b774-a98f5ebe70e3","Type":"ContainerStarted","Data":"1783e0312f251ef23d10189019424113b7e25f74be77f2a8a05fa365acb3cc6f"} Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.001678 4757 generic.go:334] "Generic (PLEG): container finished" podID="901fbe7e-0856-4210-b723-30952e1fe25d" containerID="af7a4474fb44ba23925187c4320bbf38aff8eb46dc7be1ee2f764ee24cffc76e" exitCode=0 Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.003440 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5mpv" event={"ID":"901fbe7e-0856-4210-b723-30952e1fe25d","Type":"ContainerDied","Data":"af7a4474fb44ba23925187c4320bbf38aff8eb46dc7be1ee2f764ee24cffc76e"} Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.029751 4757 patch_prober.go:28] interesting pod/downloads-7954f5f757-kpprd container/download-server namespace/openshift-console: Readiness probe 
status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.029808 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-kpprd" podUID="2ddf9932-df5d-40b6-88f8-ced01d618903" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.029750 4757 patch_prober.go:28] interesting pod/downloads-7954f5f757-kpprd container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.029859 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-kpprd" podUID="2ddf9932-df5d-40b6-88f8-ced01d618903" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.029830 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" podStartSLOduration=132.029801565 podStartE2EDuration="2m12.029801565s" podCreationTimestamp="2025-10-06 13:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:41:04.017030895 +0000 UTC m=+152.514349492" watchObservedRunningTime="2025-10-06 13:41:04.029801565 +0000 UTC m=+152.527120143" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.193407 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.197079 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5m42j"] Oct 06 13:41:04 crc kubenswrapper[4757]: E1006 13:41:04.197391 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b93821f-dfad-44a5-a217-eb63987c1f0a" containerName="collect-profiles" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.197419 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b93821f-dfad-44a5-a217-eb63987c1f0a" containerName="collect-profiles" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.197580 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b93821f-dfad-44a5-a217-eb63987c1f0a" containerName="collect-profiles" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.198732 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5m42j"] Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.198865 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.201410 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.223993 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.293739 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kube-api-access\") pod \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\" (UID: \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\") " Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.293799 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kubelet-dir\") pod \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\" (UID: \"02c5c000-37ed-4b65-bfbc-8cef2e245fe3\") " Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.293934 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "02c5c000-37ed-4b65-bfbc-8cef2e245fe3" (UID: "02c5c000-37ed-4b65-bfbc-8cef2e245fe3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.294152 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zcwv\" (UniqueName: \"kubernetes.io/projected/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-kube-api-access-7zcwv\") pod \"redhat-marketplace-5m42j\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.294184 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-catalog-content\") pod \"redhat-marketplace-5m42j\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.294204 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-utilities\") pod \"redhat-marketplace-5m42j\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.294462 4757 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.306539 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "02c5c000-37ed-4b65-bfbc-8cef2e245fe3" (UID: "02c5c000-37ed-4b65-bfbc-8cef2e245fe3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.361534 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.361585 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.383122 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-sbbvc" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.392809 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-rdbhp" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.395913 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zcwv\" (UniqueName: \"kubernetes.io/projected/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-kube-api-access-7zcwv\") pod \"redhat-marketplace-5m42j\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.395944 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-catalog-content\") pod \"redhat-marketplace-5m42j\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.395964 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-utilities\") pod \"redhat-marketplace-5m42j\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.396032 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/02c5c000-37ed-4b65-bfbc-8cef2e245fe3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.396693 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-utilities\") pod \"redhat-marketplace-5m42j\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.396748 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-catalog-content\") pod \"redhat-marketplace-5m42j\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.428032 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7zcwv\" (UniqueName: \"kubernetes.io/projected/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-kube-api-access-7zcwv\") pod \"redhat-marketplace-5m42j\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.520252 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.529922 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:04 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:04 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:04 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.529981 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.591647 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6tcf8"] Oct 06 13:41:04 crc kubenswrapper[4757]: E1006 13:41:04.592032 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c5c000-37ed-4b65-bfbc-8cef2e245fe3" containerName="pruner" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.592051 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c5c000-37ed-4b65-bfbc-8cef2e245fe3" containerName="pruner" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.592172 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="02c5c000-37ed-4b65-bfbc-8cef2e245fe3" containerName="pruner" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.593856 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.596448 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6tcf8"] Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.664656 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.736743 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-utilities\") pod \"redhat-marketplace-6tcf8\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.736791 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-catalog-content\") pod \"redhat-marketplace-6tcf8\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.736810 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgkff\" (UniqueName: \"kubernetes.io/projected/58fe806e-025a-4c3f-97d7-b334e810ef16-kube-api-access-hgkff\") pod \"redhat-marketplace-6tcf8\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.842827 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-utilities\") pod \"redhat-marketplace-6tcf8\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.842871 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-catalog-content\") pod \"redhat-marketplace-6tcf8\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.842887 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgkff\" (UniqueName: \"kubernetes.io/projected/58fe806e-025a-4c3f-97d7-b334e810ef16-kube-api-access-hgkff\") pod \"redhat-marketplace-6tcf8\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.843600 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-catalog-content\") pod \"redhat-marketplace-6tcf8\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.843927 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-utilities\") pod \"redhat-marketplace-6tcf8\" (UID: 
\"58fe806e-025a-4c3f-97d7-b334e810ef16\") " pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.876273 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgkff\" (UniqueName: \"kubernetes.io/projected/58fe806e-025a-4c3f-97d7-b334e810ef16-kube-api-access-hgkff\") pod \"redhat-marketplace-6tcf8\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:04 crc kubenswrapper[4757]: I1006 13:41:04.954509 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.001281 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5m42j"] Oct 06 13:41:05 crc kubenswrapper[4757]: W1006 13:41:05.020597 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2a35dbd_b700_4111_a6c4_eb7dc42cfbe7.slice/crio-7e6b2992c6d1893e8c6ffdd7e390bf5a3704471f08457015f24f9a3f24782d90 WatchSource:0}: Error finding container 7e6b2992c6d1893e8c6ffdd7e390bf5a3704471f08457015f24f9a3f24782d90: Status 404 returned error can't find the container with id 7e6b2992c6d1893e8c6ffdd7e390bf5a3704471f08457015f24f9a3f24782d90 Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.029252 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.029240 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"02c5c000-37ed-4b65-bfbc-8cef2e245fe3","Type":"ContainerDied","Data":"81db6b01b19e668d9efd200a8902b05789cc4490af847b7ee7ca344c276e5ed9"} Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.029379 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81db6b01b19e668d9efd200a8902b05789cc4490af847b7ee7ca344c276e5ed9" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.043646 4757 generic.go:334] "Generic (PLEG): container finished" podID="77dba3de-14f0-4ae5-8fe3-063537d1c5a1" containerID="810ba60e38879d7959d33617a5f962a423ea3eb31a477d2c313d0481abf241a5" exitCode=0 Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.044009 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"77dba3de-14f0-4ae5-8fe3-063537d1c5a1","Type":"ContainerDied","Data":"810ba60e38879d7959d33617a5f962a423ea3eb31a477d2c313d0481abf241a5"} Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.191558 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xrfzp"] Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.192808 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.195018 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.200975 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xrfzp"] Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.360074 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8fpw\" (UniqueName: \"kubernetes.io/projected/06706805-f5bd-459b-82c8-01bec4aab7ea-kube-api-access-k8fpw\") pod \"redhat-operators-xrfzp\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.360522 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-catalog-content\") pod \"redhat-operators-xrfzp\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.360553 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-utilities\") pod \"redhat-operators-xrfzp\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.437939 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6tcf8"] Oct 06 13:41:05 crc kubenswrapper[4757]: W1006 13:41:05.460308 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58fe806e_025a_4c3f_97d7_b334e810ef16.slice/crio-7680c01a8a28c1237f2671a17728c39b5eed24e4131edfe2a29ff59cc38c042c WatchSource:0}: Error finding container 7680c01a8a28c1237f2671a17728c39b5eed24e4131edfe2a29ff59cc38c042c: Status 404 returned error can't find the container with id 7680c01a8a28c1237f2671a17728c39b5eed24e4131edfe2a29ff59cc38c042c Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.461425 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8fpw\" (UniqueName: \"kubernetes.io/projected/06706805-f5bd-459b-82c8-01bec4aab7ea-kube-api-access-k8fpw\") pod \"redhat-operators-xrfzp\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.461485 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-catalog-content\") pod \"redhat-operators-xrfzp\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.461509 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-utilities\") pod \"redhat-operators-xrfzp\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " pod="openshift-marketplace/redhat-operators-xrfzp" Oct 
06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.461962 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-utilities\") pod \"redhat-operators-xrfzp\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.462195 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-catalog-content\") pod \"redhat-operators-xrfzp\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.504864 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8fpw\" (UniqueName: \"kubernetes.io/projected/06706805-f5bd-459b-82c8-01bec4aab7ea-kube-api-access-k8fpw\") pod \"redhat-operators-xrfzp\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.538734 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:05 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:05 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:05 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.538828 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.581704 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.591442 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6w7dc"] Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.592801 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.614732 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6w7dc"] Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.775637 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-catalog-content\") pod \"redhat-operators-6w7dc\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.775980 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-utilities\") pod \"redhat-operators-6w7dc\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.776323 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4bbb\" (UniqueName: \"kubernetes.io/projected/6e388314-efd0-452c-9cca-f8634f501514-kube-api-access-d4bbb\") pod \"redhat-operators-6w7dc\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.877033 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-utilities\") pod \"redhat-operators-6w7dc\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.877460 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4bbb\" (UniqueName: \"kubernetes.io/projected/6e388314-efd0-452c-9cca-f8634f501514-kube-api-access-d4bbb\") pod \"redhat-operators-6w7dc\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.877491 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-catalog-content\") pod \"redhat-operators-6w7dc\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.877818 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-utilities\") pod \"redhat-operators-6w7dc\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.887737 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-catalog-content\") pod \"redhat-operators-6w7dc\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.897381 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-d4bbb\" (UniqueName: \"kubernetes.io/projected/6e388314-efd0-452c-9cca-f8634f501514-kube-api-access-d4bbb\") pod \"redhat-operators-6w7dc\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:05 crc kubenswrapper[4757]: I1006 13:41:05.988499 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.061230 4757 generic.go:334] "Generic (PLEG): container finished" podID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerID="3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0" exitCode=0 Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.061316 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6tcf8" event={"ID":"58fe806e-025a-4c3f-97d7-b334e810ef16","Type":"ContainerDied","Data":"3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0"} Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.061348 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6tcf8" event={"ID":"58fe806e-025a-4c3f-97d7-b334e810ef16","Type":"ContainerStarted","Data":"7680c01a8a28c1237f2671a17728c39b5eed24e4131edfe2a29ff59cc38c042c"} Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.064183 4757 generic.go:334] "Generic (PLEG): container finished" podID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerID="67edf6601fa8e2737de5075910234803fc55ed9b8866b5c37ebf5303cc3fc446" exitCode=0 Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.064294 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5m42j" event={"ID":"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7","Type":"ContainerDied","Data":"67edf6601fa8e2737de5075910234803fc55ed9b8866b5c37ebf5303cc3fc446"} Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.064340 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5m42j" event={"ID":"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7","Type":"ContainerStarted","Data":"7e6b2992c6d1893e8c6ffdd7e390bf5a3704471f08457015f24f9a3f24782d90"} Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.115652 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xrfzp"] Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.453204 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.503068 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6w7dc"] Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.537551 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:06 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:06 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:06 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.537601 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.585481 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kube-api-access\") pod \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\" (UID: \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\") " Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.585536 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kubelet-dir\") pod \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\" (UID: \"77dba3de-14f0-4ae5-8fe3-063537d1c5a1\") " Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.585678 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "77dba3de-14f0-4ae5-8fe3-063537d1c5a1" (UID: "77dba3de-14f0-4ae5-8fe3-063537d1c5a1"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.585920 4757 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.593700 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "77dba3de-14f0-4ae5-8fe3-063537d1c5a1" (UID: "77dba3de-14f0-4ae5-8fe3-063537d1c5a1"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:41:06 crc kubenswrapper[4757]: I1006 13:41:06.687778 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/77dba3de-14f0-4ae5-8fe3-063537d1c5a1-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.077767 4757 generic.go:334] "Generic (PLEG): container finished" podID="6e388314-efd0-452c-9cca-f8634f501514" containerID="5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f" exitCode=0 Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.077890 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w7dc" event={"ID":"6e388314-efd0-452c-9cca-f8634f501514","Type":"ContainerDied","Data":"5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f"} Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.077918 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w7dc" event={"ID":"6e388314-efd0-452c-9cca-f8634f501514","Type":"ContainerStarted","Data":"f271dfc27d59d6cd0d590d66dd53782e20d67e21a5faaff437d48682ec3a707f"} Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.081275 4757 generic.go:334] "Generic (PLEG): container finished" podID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerID="dabda7f2477df34bb2918c0bbfb3a96aaaa209f0c3a20fc039c654a8b35605a4" exitCode=0 Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.081342 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrfzp" event={"ID":"06706805-f5bd-459b-82c8-01bec4aab7ea","Type":"ContainerDied","Data":"dabda7f2477df34bb2918c0bbfb3a96aaaa209f0c3a20fc039c654a8b35605a4"} Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.081398 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrfzp" event={"ID":"06706805-f5bd-459b-82c8-01bec4aab7ea","Type":"ContainerStarted","Data":"d695433f45a8cf8e6905870dba270d0511af9b1f5fa329bf075ddce3a7984fd1"} Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.084455 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"77dba3de-14f0-4ae5-8fe3-063537d1c5a1","Type":"ContainerDied","Data":"c08c8d4b6cee2966248c67bac1a98746f705e07b8faa75519303c8efc3f4c2e4"} Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.084484 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c08c8d4b6cee2966248c67bac1a98746f705e07b8faa75519303c8efc3f4c2e4" Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.084556 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.453961 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.458885 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-hm8qr" Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.534071 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:07 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:07 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:07 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:07 crc kubenswrapper[4757]: I1006 13:41:07.534143 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:08 crc kubenswrapper[4757]: I1006 13:41:08.534283 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:08 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:08 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:08 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:08 crc kubenswrapper[4757]: I1006 13:41:08.534342 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:09 crc kubenswrapper[4757]: I1006 13:41:09.430804 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-wdjlv" Oct 06 13:41:09 crc kubenswrapper[4757]: I1006 13:41:09.529244 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:09 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:09 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:09 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:09 crc kubenswrapper[4757]: I1006 13:41:09.529303 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:10 crc kubenswrapper[4757]: I1006 13:41:10.529972 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:10 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:10 crc 
kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:10 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:10 crc kubenswrapper[4757]: I1006 13:41:10.530065 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:11 crc kubenswrapper[4757]: I1006 13:41:11.528674 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:11 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:11 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:11 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:11 crc kubenswrapper[4757]: I1006 13:41:11.528743 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:12 crc kubenswrapper[4757]: I1006 13:41:12.529274 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:12 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:12 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:12 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:12 crc kubenswrapper[4757]: I1006 13:41:12.529334 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:13 crc kubenswrapper[4757]: I1006 13:41:13.283168 4757 patch_prober.go:28] interesting pod/console-f9d7485db-xbn64 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Oct 06 13:41:13 crc kubenswrapper[4757]: I1006 13:41:13.283625 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xbn64" podUID="0345b748-8161-40b5-bec8-0c36c2d87ea3" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Oct 06 13:41:13 crc kubenswrapper[4757]: I1006 13:41:13.529214 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:13 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:13 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:13 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:13 crc kubenswrapper[4757]: I1006 13:41:13.529276 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" 
output="HTTP probe failed with statuscode: 500" Oct 06 13:41:14 crc kubenswrapper[4757]: I1006 13:41:14.035275 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-kpprd" Oct 06 13:41:14 crc kubenswrapper[4757]: I1006 13:41:14.296545 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:41:14 crc kubenswrapper[4757]: I1006 13:41:14.338422 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a0a24d2-8946-4710-91f2-cc59ecedb5e3-metrics-certs\") pod \"network-metrics-daemon-sc9qx\" (UID: \"8a0a24d2-8946-4710-91f2-cc59ecedb5e3\") " pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:41:14 crc kubenswrapper[4757]: I1006 13:41:14.531755 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:14 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:14 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:14 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:14 crc kubenswrapper[4757]: I1006 13:41:14.534711 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:14 crc kubenswrapper[4757]: I1006 13:41:14.615847 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-sc9qx" Oct 06 13:41:15 crc kubenswrapper[4757]: I1006 13:41:15.530023 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:15 crc kubenswrapper[4757]: [-]has-synced failed: reason withheld Oct 06 13:41:15 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:15 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:15 crc kubenswrapper[4757]: I1006 13:41:15.530112 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:16 crc kubenswrapper[4757]: I1006 13:41:16.531013 4757 patch_prober.go:28] interesting pod/router-default-5444994796-mtmpm container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 06 13:41:16 crc kubenswrapper[4757]: [+]has-synced ok Oct 06 13:41:16 crc kubenswrapper[4757]: [+]process-running ok Oct 06 13:41:16 crc kubenswrapper[4757]: healthz check failed Oct 06 13:41:16 crc kubenswrapper[4757]: I1006 13:41:16.531078 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-mtmpm" podUID="4845c7dd-e037-41f4-914b-bef0afffaad6" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 06 13:41:17 crc kubenswrapper[4757]: I1006 13:41:17.530083 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:41:17 crc kubenswrapper[4757]: I1006 13:41:17.533719 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-mtmpm" Oct 06 13:41:22 crc kubenswrapper[4757]: I1006 13:41:22.661335 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:41:23 crc kubenswrapper[4757]: I1006 13:41:23.286437 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:41:23 crc kubenswrapper[4757]: I1006 13:41:23.292343 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:41:34 crc kubenswrapper[4757]: I1006 13:41:34.361370 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:41:34 crc kubenswrapper[4757]: I1006 13:41:34.362177 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:41:34 crc kubenswrapper[4757]: I1006 13:41:34.372164 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x8wd5" Oct 06 13:41:34 crc kubenswrapper[4757]: E1006 13:41:34.652287 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 06 13:41:34 crc kubenswrapper[4757]: E1006 13:41:34.652533 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2vpxh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-d8wfp_openshift-marketplace(f9a9e7aa-5ab4-4877-b808-1097b5c27155): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 06 13:41:34 crc kubenswrapper[4757]: E1006 13:41:34.653803 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-d8wfp" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" Oct 06 13:41:35 crc kubenswrapper[4757]: E1006 13:41:35.381506 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 06 13:41:35 crc kubenswrapper[4757]: E1006 13:41:35.382080 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ktf9k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-h5mpv_openshift-marketplace(901fbe7e-0856-4210-b723-30952e1fe25d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 06 13:41:35 crc kubenswrapper[4757]: E1006 13:41:35.383655 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-h5mpv" podUID="901fbe7e-0856-4210-b723-30952e1fe25d" Oct 06 13:41:35 crc kubenswrapper[4757]: E1006 13:41:35.431431 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 06 13:41:35 crc kubenswrapper[4757]: E1006 13:41:35.431637 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
Oct 06 13:41:35 crc kubenswrapper[4757]: E1006 13:41:35.432876 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-bdfrj" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49"
Oct 06 13:41:35 crc kubenswrapper[4757]: I1006 13:41:35.663390 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-sc9qx"]
Oct 06 13:41:38 crc kubenswrapper[4757]: E1006 13:41:38.332274 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-bdfrj" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49"
Oct 06 13:41:38 crc kubenswrapper[4757]: E1006 13:41:38.332285 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-h5mpv" podUID="901fbe7e-0856-4210-b723-30952e1fe25d"
Oct 06 13:41:38 crc kubenswrapper[4757]: E1006 13:41:38.332457 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-d8wfp" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155"
Oct 06 13:41:38 crc kubenswrapper[4757]: E1006 13:41:38.373626 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Oct 06 13:41:38 crc kubenswrapper[4757]: E1006 13:41:38.373824 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d4bbb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-6w7dc_openshift-marketplace(6e388314-efd0-452c-9cca-f8634f501514): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 06 13:41:38 crc kubenswrapper[4757]: E1006 13:41:38.375329 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-6w7dc" podUID="6e388314-efd0-452c-9cca-f8634f501514"
Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.084977 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.085622 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7zcwv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-5m42j_openshift-marketplace(d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.086538 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.086690 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hgkff,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-6tcf8_openshift-marketplace(58fe806e-025a-4c3f-97d7-b334e810ef16): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.086781 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-5m42j" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7"
Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.088754 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-6tcf8" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16"
Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.105732 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.105979 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k8fpw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-xrfzp_openshift-marketplace(06706805-f5bd-459b-82c8-01bec4aab7ea): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.107301 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-xrfzp" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea"
canceled\"" pod="openshift-marketplace/redhat-operators-xrfzp" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" Oct 06 13:41:39 crc kubenswrapper[4757]: I1006 13:41:39.308810 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" event={"ID":"8a0a24d2-8946-4710-91f2-cc59ecedb5e3","Type":"ContainerStarted","Data":"a0f1cd2873df7b63fd6a27534f4d7a370668feae2ecb0ec5ec1fdac33de5797a"} Oct 06 13:41:39 crc kubenswrapper[4757]: I1006 13:41:39.309329 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" event={"ID":"8a0a24d2-8946-4710-91f2-cc59ecedb5e3","Type":"ContainerStarted","Data":"041306cb199baeb35229bd7c5be6ccd213a4eba2700ac1fa7ffd030df582adf1"} Oct 06 13:41:39 crc kubenswrapper[4757]: I1006 13:41:39.311765 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jgt5d" event={"ID":"69959a98-24ab-49d0-b774-a98f5ebe70e3","Type":"ContainerStarted","Data":"62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f"} Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.313434 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-6w7dc" podUID="6e388314-efd0-452c-9cca-f8634f501514" Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.313654 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-xrfzp" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.313969 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-6tcf8" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" Oct 06 13:41:39 crc kubenswrapper[4757]: E1006 13:41:39.314926 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-5m42j" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" Oct 06 13:41:40 crc kubenswrapper[4757]: I1006 13:41:40.319831 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-sc9qx" event={"ID":"8a0a24d2-8946-4710-91f2-cc59ecedb5e3","Type":"ContainerStarted","Data":"89aafae168da0b9fd93501991d1aa126ab551412baa8ce3e8d6297dc119d5419"} Oct 06 13:41:40 crc kubenswrapper[4757]: I1006 13:41:40.321726 4757 generic.go:334] "Generic (PLEG): container finished" podID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerID="62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f" exitCode=0 Oct 06 13:41:40 crc kubenswrapper[4757]: I1006 13:41:40.321769 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jgt5d" event={"ID":"69959a98-24ab-49d0-b774-a98f5ebe70e3","Type":"ContainerDied","Data":"62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f"} Oct 06 13:41:40 crc 
kubenswrapper[4757]: I1006 13:41:40.345327 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-sc9qx" podStartSLOduration=169.345296352 podStartE2EDuration="2m49.345296352s" podCreationTimestamp="2025-10-06 13:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:41:40.341667478 +0000 UTC m=+188.838986045" watchObservedRunningTime="2025-10-06 13:41:40.345296352 +0000 UTC m=+188.842614969" Oct 06 13:41:40 crc kubenswrapper[4757]: I1006 13:41:40.507133 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 06 13:41:41 crc kubenswrapper[4757]: I1006 13:41:41.338967 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jgt5d" event={"ID":"69959a98-24ab-49d0-b774-a98f5ebe70e3","Type":"ContainerStarted","Data":"ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e"} Oct 06 13:41:41 crc kubenswrapper[4757]: I1006 13:41:41.371322 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jgt5d" podStartSLOduration=2.343140527 podStartE2EDuration="39.371295197s" podCreationTimestamp="2025-10-06 13:41:02 +0000 UTC" firstStartedPulling="2025-10-06 13:41:04.000770706 +0000 UTC m=+152.498089283" lastFinishedPulling="2025-10-06 13:41:41.028925406 +0000 UTC m=+189.526243953" observedRunningTime="2025-10-06 13:41:41.367886123 +0000 UTC m=+189.865204680" watchObservedRunningTime="2025-10-06 13:41:41.371295197 +0000 UTC m=+189.868613754" Oct 06 13:41:42 crc kubenswrapper[4757]: I1006 13:41:42.736139 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:42 crc kubenswrapper[4757]: I1006 13:41:42.736201 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:44 crc kubenswrapper[4757]: I1006 13:41:44.213827 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-jgt5d" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerName="registry-server" probeResult="failure" output=< Oct 06 13:41:44 crc kubenswrapper[4757]: timeout: failed to connect service ":50051" within 1s Oct 06 13:41:44 crc kubenswrapper[4757]: > Oct 06 13:41:51 crc kubenswrapper[4757]: I1006 13:41:51.406677 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6tcf8" event={"ID":"58fe806e-025a-4c3f-97d7-b334e810ef16","Type":"ContainerStarted","Data":"183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd"} Oct 06 13:41:52 crc kubenswrapper[4757]: I1006 13:41:52.412653 4757 generic.go:334] "Generic (PLEG): container finished" podID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerID="e8e3ec44640774d1a04d895be69915b0002c94e1203d16f57bb58dd2377c9e9f" exitCode=0 Oct 06 13:41:52 crc kubenswrapper[4757]: I1006 13:41:52.412740 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdfrj" event={"ID":"ab36484b-2956-4a35-8379-6b1fc3ffca49","Type":"ContainerDied","Data":"e8e3ec44640774d1a04d895be69915b0002c94e1203d16f57bb58dd2377c9e9f"} Oct 06 13:41:52 crc kubenswrapper[4757]: I1006 13:41:52.415750 4757 generic.go:334] "Generic (PLEG): container finished" 
podID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerID="525aa25baa48255d37b237a1c51fdc6630b3298430a715a3ec2f32459329a6c3" exitCode=0 Oct 06 13:41:52 crc kubenswrapper[4757]: I1006 13:41:52.415817 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5m42j" event={"ID":"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7","Type":"ContainerDied","Data":"525aa25baa48255d37b237a1c51fdc6630b3298430a715a3ec2f32459329a6c3"} Oct 06 13:41:52 crc kubenswrapper[4757]: I1006 13:41:52.421748 4757 generic.go:334] "Generic (PLEG): container finished" podID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerID="183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd" exitCode=0 Oct 06 13:41:52 crc kubenswrapper[4757]: I1006 13:41:52.421800 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6tcf8" event={"ID":"58fe806e-025a-4c3f-97d7-b334e810ef16","Type":"ContainerDied","Data":"183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd"} Oct 06 13:41:52 crc kubenswrapper[4757]: I1006 13:41:52.787739 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:52 crc kubenswrapper[4757]: I1006 13:41:52.849143 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.429547 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jgt5d"] Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.429890 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5mpv" event={"ID":"901fbe7e-0856-4210-b723-30952e1fe25d","Type":"ContainerDied","Data":"d3f9dfec115943bc6f5430f9b10277bbc8eab2422af1ff0b849df29c02640124"} Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.429704 4757 generic.go:334] "Generic (PLEG): container finished" podID="901fbe7e-0856-4210-b723-30952e1fe25d" containerID="d3f9dfec115943bc6f5430f9b10277bbc8eab2422af1ff0b849df29c02640124" exitCode=0 Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.432214 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6tcf8" event={"ID":"58fe806e-025a-4c3f-97d7-b334e810ef16","Type":"ContainerStarted","Data":"78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6"} Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.434277 4757 generic.go:334] "Generic (PLEG): container finished" podID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerID="2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115" exitCode=0 Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.434344 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8wfp" event={"ID":"f9a9e7aa-5ab4-4877-b808-1097b5c27155","Type":"ContainerDied","Data":"2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115"} Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.437867 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdfrj" event={"ID":"ab36484b-2956-4a35-8379-6b1fc3ffca49","Type":"ContainerStarted","Data":"0cbb512d91779e6096229e64f651a8715c19c59907badb660491d05178a622e9"} Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.441285 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-5m42j" event={"ID":"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7","Type":"ContainerStarted","Data":"5ebca2af7e8005869eafa12b26cf776eb99af35ae15950b64581581bbd786217"} Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.479779 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5m42j" podStartSLOduration=2.571188223 podStartE2EDuration="49.479757249s" podCreationTimestamp="2025-10-06 13:41:04 +0000 UTC" firstStartedPulling="2025-10-06 13:41:06.065854569 +0000 UTC m=+154.563173106" lastFinishedPulling="2025-10-06 13:41:52.974423575 +0000 UTC m=+201.471742132" observedRunningTime="2025-10-06 13:41:53.476114832 +0000 UTC m=+201.973433389" watchObservedRunningTime="2025-10-06 13:41:53.479757249 +0000 UTC m=+201.977075796" Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.516417 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6tcf8" podStartSLOduration=2.594991334 podStartE2EDuration="49.516399439s" podCreationTimestamp="2025-10-06 13:41:04 +0000 UTC" firstStartedPulling="2025-10-06 13:41:06.063263423 +0000 UTC m=+154.560581960" lastFinishedPulling="2025-10-06 13:41:52.984671518 +0000 UTC m=+201.481990065" observedRunningTime="2025-10-06 13:41:53.515587604 +0000 UTC m=+202.012906151" watchObservedRunningTime="2025-10-06 13:41:53.516399439 +0000 UTC m=+202.013717976" Oct 06 13:41:53 crc kubenswrapper[4757]: I1006 13:41:53.537956 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bdfrj" podStartSLOduration=2.6407812059999998 podStartE2EDuration="52.53793896s" podCreationTimestamp="2025-10-06 13:41:01 +0000 UTC" firstStartedPulling="2025-10-06 13:41:02.988922001 +0000 UTC m=+151.486240538" lastFinishedPulling="2025-10-06 13:41:52.886079745 +0000 UTC m=+201.383398292" observedRunningTime="2025-10-06 13:41:53.534833489 +0000 UTC m=+202.032152026" watchObservedRunningTime="2025-10-06 13:41:53.53793896 +0000 UTC m=+202.035257497" Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.448987 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w7dc" event={"ID":"6e388314-efd0-452c-9cca-f8634f501514","Type":"ContainerStarted","Data":"202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd"} Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.450967 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrfzp" event={"ID":"06706805-f5bd-459b-82c8-01bec4aab7ea","Type":"ContainerStarted","Data":"8b22e7dcd2126a007a106b33a10e96d5ec029f1461ee130daea1830e20164e08"} Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.455011 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5mpv" event={"ID":"901fbe7e-0856-4210-b723-30952e1fe25d","Type":"ContainerStarted","Data":"90e98178792f3724f2eed442599a0c83eea13f255ca9bd3f4423934a1852fdd8"} Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.455138 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jgt5d" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerName="registry-server" containerID="cri-o://ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e" gracePeriod=2 Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.495649 4757 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/certified-operators-h5mpv" podStartSLOduration=2.244726675 podStartE2EDuration="52.495627617s" podCreationTimestamp="2025-10-06 13:41:02 +0000 UTC" firstStartedPulling="2025-10-06 13:41:04.008915306 +0000 UTC m=+152.506233843" lastFinishedPulling="2025-10-06 13:41:54.259816248 +0000 UTC m=+202.757134785" observedRunningTime="2025-10-06 13:41:54.495513855 +0000 UTC m=+202.992832392" watchObservedRunningTime="2025-10-06 13:41:54.495627617 +0000 UTC m=+202.992946154" Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.521385 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.521444 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.570285 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.860586 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.955736 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:54 crc kubenswrapper[4757]: I1006 13:41:54.956224 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.008065 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.016877 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-896n4\" (UniqueName: \"kubernetes.io/projected/69959a98-24ab-49d0-b774-a98f5ebe70e3-kube-api-access-896n4\") pod \"69959a98-24ab-49d0-b774-a98f5ebe70e3\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.016931 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-catalog-content\") pod \"69959a98-24ab-49d0-b774-a98f5ebe70e3\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.016990 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-utilities\") pod \"69959a98-24ab-49d0-b774-a98f5ebe70e3\" (UID: \"69959a98-24ab-49d0-b774-a98f5ebe70e3\") " Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.018259 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-utilities" (OuterVolumeSpecName: "utilities") pod "69959a98-24ab-49d0-b774-a98f5ebe70e3" (UID: "69959a98-24ab-49d0-b774-a98f5ebe70e3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.026229 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69959a98-24ab-49d0-b774-a98f5ebe70e3-kube-api-access-896n4" (OuterVolumeSpecName: "kube-api-access-896n4") pod "69959a98-24ab-49d0-b774-a98f5ebe70e3" (UID: "69959a98-24ab-49d0-b774-a98f5ebe70e3"). InnerVolumeSpecName "kube-api-access-896n4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.089864 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69959a98-24ab-49d0-b774-a98f5ebe70e3" (UID: "69959a98-24ab-49d0-b774-a98f5ebe70e3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.118806 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.118858 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-896n4\" (UniqueName: \"kubernetes.io/projected/69959a98-24ab-49d0-b774-a98f5ebe70e3-kube-api-access-896n4\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.118871 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69959a98-24ab-49d0-b774-a98f5ebe70e3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.462512 4757 generic.go:334] "Generic (PLEG): container finished" podID="6e388314-efd0-452c-9cca-f8634f501514" containerID="202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd" exitCode=0 Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.462724 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w7dc" event={"ID":"6e388314-efd0-452c-9cca-f8634f501514","Type":"ContainerDied","Data":"202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd"} Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.465941 4757 generic.go:334] "Generic (PLEG): container finished" podID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerID="8b22e7dcd2126a007a106b33a10e96d5ec029f1461ee130daea1830e20164e08" exitCode=0 Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.465987 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrfzp" event={"ID":"06706805-f5bd-459b-82c8-01bec4aab7ea","Type":"ContainerDied","Data":"8b22e7dcd2126a007a106b33a10e96d5ec029f1461ee130daea1830e20164e08"} Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.477536 4757 generic.go:334] "Generic (PLEG): container finished" podID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerID="ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e" exitCode=0 Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.477628 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jgt5d" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.477626 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jgt5d" event={"ID":"69959a98-24ab-49d0-b774-a98f5ebe70e3","Type":"ContainerDied","Data":"ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e"} Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.477698 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jgt5d" event={"ID":"69959a98-24ab-49d0-b774-a98f5ebe70e3","Type":"ContainerDied","Data":"1783e0312f251ef23d10189019424113b7e25f74be77f2a8a05fa365acb3cc6f"} Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.477725 4757 scope.go:117] "RemoveContainer" containerID="ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.503788 4757 scope.go:117] "RemoveContainer" containerID="62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.523031 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jgt5d"] Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.527158 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jgt5d"] Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.534220 4757 scope.go:117] "RemoveContainer" containerID="abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.561363 4757 scope.go:117] "RemoveContainer" containerID="ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e" Oct 06 13:41:55 crc kubenswrapper[4757]: E1006 13:41:55.562003 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e\": container with ID starting with ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e not found: ID does not exist" containerID="ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.562033 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e"} err="failed to get container status \"ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e\": rpc error: code = NotFound desc = could not find container \"ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e\": container with ID starting with ae66be40a35ae9427bd38ac0a2241d7f491b86876595b27780c2d5e67dcfad1e not found: ID does not exist" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.562074 4757 scope.go:117] "RemoveContainer" containerID="62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f" Oct 06 13:41:55 crc kubenswrapper[4757]: E1006 13:41:55.562462 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f\": container with ID starting with 62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f not found: ID does not exist" containerID="62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.562522 4757 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f"} err="failed to get container status \"62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f\": rpc error: code = NotFound desc = could not find container \"62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f\": container with ID starting with 62d07971a1a2d82083de7d297e45f1e6790473adc4573c7d040c40f39e62834f not found: ID does not exist" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.562560 4757 scope.go:117] "RemoveContainer" containerID="abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a" Oct 06 13:41:55 crc kubenswrapper[4757]: E1006 13:41:55.565051 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a\": container with ID starting with abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a not found: ID does not exist" containerID="abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a" Oct 06 13:41:55 crc kubenswrapper[4757]: I1006 13:41:55.565087 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a"} err="failed to get container status \"abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a\": rpc error: code = NotFound desc = could not find container \"abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a\": container with ID starting with abde05d6be0a40b05252ba59b3c64776430954fd3f6a9d9ba5dc5ad0cdeef71a not found: ID does not exist" Oct 06 13:41:56 crc kubenswrapper[4757]: I1006 13:41:56.187001 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" path="/var/lib/kubelet/pods/69959a98-24ab-49d0-b774-a98f5ebe70e3/volumes" Oct 06 13:41:56 crc kubenswrapper[4757]: I1006 13:41:56.485350 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w7dc" event={"ID":"6e388314-efd0-452c-9cca-f8634f501514","Type":"ContainerStarted","Data":"30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0"} Oct 06 13:41:56 crc kubenswrapper[4757]: I1006 13:41:56.507468 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6w7dc" podStartSLOduration=2.638196899 podStartE2EDuration="51.507448138s" podCreationTimestamp="2025-10-06 13:41:05 +0000 UTC" firstStartedPulling="2025-10-06 13:41:07.082055873 +0000 UTC m=+155.579374410" lastFinishedPulling="2025-10-06 13:41:55.951307112 +0000 UTC m=+204.448625649" observedRunningTime="2025-10-06 13:41:56.50505156 +0000 UTC m=+205.002370097" watchObservedRunningTime="2025-10-06 13:41:56.507448138 +0000 UTC m=+205.004766675" Oct 06 13:41:58 crc kubenswrapper[4757]: I1006 13:41:58.509593 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrfzp" event={"ID":"06706805-f5bd-459b-82c8-01bec4aab7ea","Type":"ContainerStarted","Data":"2019012ee5ec079e0f7f17e057ea96e9b0d50bfd61e15c088b415b7138e09f90"} Oct 06 13:41:58 crc kubenswrapper[4757]: I1006 13:41:58.534913 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xrfzp" podStartSLOduration=2.861314659 podStartE2EDuration="53.534886655s" 
podCreationTimestamp="2025-10-06 13:41:05 +0000 UTC" firstStartedPulling="2025-10-06 13:41:07.086940624 +0000 UTC m=+155.584259161" lastFinishedPulling="2025-10-06 13:41:57.76051258 +0000 UTC m=+206.257831157" observedRunningTime="2025-10-06 13:41:58.531038399 +0000 UTC m=+207.028356936" watchObservedRunningTime="2025-10-06 13:41:58.534886655 +0000 UTC m=+207.032205212" Oct 06 13:41:59 crc kubenswrapper[4757]: I1006 13:41:59.481488 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zcwgr"] Oct 06 13:42:00 crc kubenswrapper[4757]: I1006 13:42:00.520821 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8wfp" event={"ID":"f9a9e7aa-5ab4-4877-b808-1097b5c27155","Type":"ContainerStarted","Data":"5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f"} Oct 06 13:42:01 crc kubenswrapper[4757]: I1006 13:42:01.546831 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-d8wfp" podStartSLOduration=3.575190297 podStartE2EDuration="59.54681076s" podCreationTimestamp="2025-10-06 13:41:02 +0000 UTC" firstStartedPulling="2025-10-06 13:41:03.996165466 +0000 UTC m=+152.493484003" lastFinishedPulling="2025-10-06 13:41:59.967785929 +0000 UTC m=+208.465104466" observedRunningTime="2025-10-06 13:42:01.543345958 +0000 UTC m=+210.040664495" watchObservedRunningTime="2025-10-06 13:42:01.54681076 +0000 UTC m=+210.044129297" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.342438 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bdfrj" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.342954 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bdfrj" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.402002 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bdfrj" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.515123 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.515170 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.553960 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.571123 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bdfrj" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.597865 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.963568 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:42:02 crc kubenswrapper[4757]: I1006 13:42:02.963637 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:42:03 crc kubenswrapper[4757]: I1006 13:42:03.021824 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-d8wfp" Oct 06 13:42:04 crc kubenswrapper[4757]: I1006 13:42:04.361484 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:42:04 crc kubenswrapper[4757]: I1006 13:42:04.361814 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:42:04 crc kubenswrapper[4757]: I1006 13:42:04.361867 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:42:04 crc kubenswrapper[4757]: I1006 13:42:04.362520 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 13:42:04 crc kubenswrapper[4757]: I1006 13:42:04.362589 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214" gracePeriod=600 Oct 06 13:42:04 crc kubenswrapper[4757]: I1006 13:42:04.571868 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:42:05 crc kubenswrapper[4757]: I1006 13:42:05.002271 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:42:05 crc kubenswrapper[4757]: I1006 13:42:05.547940 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214" exitCode=0 Oct 06 13:42:05 crc kubenswrapper[4757]: I1006 13:42:05.548015 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214"} Oct 06 13:42:05 crc kubenswrapper[4757]: I1006 13:42:05.548415 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"cbd63c2fd83eb3b13b70ca6d17410007d8a08e6fecff91ef597ee01a17e3a5db"} Oct 06 13:42:05 crc kubenswrapper[4757]: I1006 13:42:05.582245 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:42:05 crc kubenswrapper[4757]: I1006 13:42:05.582643 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:42:05 crc 
kubenswrapper[4757]: I1006 13:42:05.631241 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:42:05 crc kubenswrapper[4757]: I1006 13:42:05.989152 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:42:05 crc kubenswrapper[4757]: I1006 13:42:05.989244 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:42:06 crc kubenswrapper[4757]: I1006 13:42:06.038606 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:42:06 crc kubenswrapper[4757]: I1006 13:42:06.592992 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:42:06 crc kubenswrapper[4757]: I1006 13:42:06.616536 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:42:08 crc kubenswrapper[4757]: I1006 13:42:08.627954 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6tcf8"] Oct 06 13:42:08 crc kubenswrapper[4757]: I1006 13:42:08.628523 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6tcf8" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerName="registry-server" containerID="cri-o://78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6" gracePeriod=2 Oct 06 13:42:08 crc kubenswrapper[4757]: I1006 13:42:08.825833 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6w7dc"] Oct 06 13:42:08 crc kubenswrapper[4757]: I1006 13:42:08.826719 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6w7dc" podUID="6e388314-efd0-452c-9cca-f8634f501514" containerName="registry-server" containerID="cri-o://30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0" gracePeriod=2 Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.028491 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.121744 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-utilities\") pod \"58fe806e-025a-4c3f-97d7-b334e810ef16\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.122080 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgkff\" (UniqueName: \"kubernetes.io/projected/58fe806e-025a-4c3f-97d7-b334e810ef16-kube-api-access-hgkff\") pod \"58fe806e-025a-4c3f-97d7-b334e810ef16\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.122119 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-catalog-content\") pod \"58fe806e-025a-4c3f-97d7-b334e810ef16\" (UID: \"58fe806e-025a-4c3f-97d7-b334e810ef16\") " Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.123588 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-utilities" (OuterVolumeSpecName: "utilities") pod "58fe806e-025a-4c3f-97d7-b334e810ef16" (UID: "58fe806e-025a-4c3f-97d7-b334e810ef16"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.129356 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58fe806e-025a-4c3f-97d7-b334e810ef16-kube-api-access-hgkff" (OuterVolumeSpecName: "kube-api-access-hgkff") pod "58fe806e-025a-4c3f-97d7-b334e810ef16" (UID: "58fe806e-025a-4c3f-97d7-b334e810ef16"). InnerVolumeSpecName "kube-api-access-hgkff". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.138353 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58fe806e-025a-4c3f-97d7-b334e810ef16" (UID: "58fe806e-025a-4c3f-97d7-b334e810ef16"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.224082 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.224139 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgkff\" (UniqueName: \"kubernetes.io/projected/58fe806e-025a-4c3f-97d7-b334e810ef16-kube-api-access-hgkff\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.224154 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58fe806e-025a-4c3f-97d7-b334e810ef16-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.242655 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.325525 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-utilities\") pod \"6e388314-efd0-452c-9cca-f8634f501514\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.325638 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4bbb\" (UniqueName: \"kubernetes.io/projected/6e388314-efd0-452c-9cca-f8634f501514-kube-api-access-d4bbb\") pod \"6e388314-efd0-452c-9cca-f8634f501514\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.325663 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-catalog-content\") pod \"6e388314-efd0-452c-9cca-f8634f501514\" (UID: \"6e388314-efd0-452c-9cca-f8634f501514\") " Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.326317 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-utilities" (OuterVolumeSpecName: "utilities") pod "6e388314-efd0-452c-9cca-f8634f501514" (UID: "6e388314-efd0-452c-9cca-f8634f501514"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.329218 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e388314-efd0-452c-9cca-f8634f501514-kube-api-access-d4bbb" (OuterVolumeSpecName: "kube-api-access-d4bbb") pod "6e388314-efd0-452c-9cca-f8634f501514" (UID: "6e388314-efd0-452c-9cca-f8634f501514"). InnerVolumeSpecName "kube-api-access-d4bbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.417335 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e388314-efd0-452c-9cca-f8634f501514" (UID: "6e388314-efd0-452c-9cca-f8634f501514"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.426754 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.426806 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e388314-efd0-452c-9cca-f8634f501514-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.426821 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4bbb\" (UniqueName: \"kubernetes.io/projected/6e388314-efd0-452c-9cca-f8634f501514-kube-api-access-d4bbb\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.595600 4757 generic.go:334] "Generic (PLEG): container finished" podID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerID="78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6" exitCode=0 Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.595648 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6tcf8" event={"ID":"58fe806e-025a-4c3f-97d7-b334e810ef16","Type":"ContainerDied","Data":"78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6"} Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.595688 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6tcf8" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.595715 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6tcf8" event={"ID":"58fe806e-025a-4c3f-97d7-b334e810ef16","Type":"ContainerDied","Data":"7680c01a8a28c1237f2671a17728c39b5eed24e4131edfe2a29ff59cc38c042c"} Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.595740 4757 scope.go:117] "RemoveContainer" containerID="78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.600655 4757 generic.go:334] "Generic (PLEG): container finished" podID="6e388314-efd0-452c-9cca-f8634f501514" containerID="30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0" exitCode=0 Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.600696 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w7dc" event={"ID":"6e388314-efd0-452c-9cca-f8634f501514","Type":"ContainerDied","Data":"30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0"} Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.600716 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6w7dc" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.600718 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w7dc" event={"ID":"6e388314-efd0-452c-9cca-f8634f501514","Type":"ContainerDied","Data":"f271dfc27d59d6cd0d590d66dd53782e20d67e21a5faaff437d48682ec3a707f"} Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.620563 4757 scope.go:117] "RemoveContainer" containerID="183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.623526 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6tcf8"] Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.626839 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6tcf8"] Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.641387 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6w7dc"] Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.643876 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6w7dc"] Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.650341 4757 scope.go:117] "RemoveContainer" containerID="3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.664269 4757 scope.go:117] "RemoveContainer" containerID="78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6" Oct 06 13:42:09 crc kubenswrapper[4757]: E1006 13:42:09.664700 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6\": container with ID starting with 78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6 not found: ID does not exist" containerID="78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.664743 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6"} err="failed to get container status \"78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6\": rpc error: code = NotFound desc = could not find container \"78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6\": container with ID starting with 78e1e4a6a6383599b882caa5cc08a3437041feb6f488596727b2ea4d5a82cbf6 not found: ID does not exist" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.664784 4757 scope.go:117] "RemoveContainer" containerID="183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd" Oct 06 13:42:09 crc kubenswrapper[4757]: E1006 13:42:09.665110 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd\": container with ID starting with 183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd not found: ID does not exist" containerID="183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.665148 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd"} 
err="failed to get container status \"183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd\": rpc error: code = NotFound desc = could not find container \"183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd\": container with ID starting with 183b07022362218d9c4e36fdb14a1bf1fbc216e971fbd01af6ce5baeec87afdd not found: ID does not exist" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.665173 4757 scope.go:117] "RemoveContainer" containerID="3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0" Oct 06 13:42:09 crc kubenswrapper[4757]: E1006 13:42:09.665485 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0\": container with ID starting with 3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0 not found: ID does not exist" containerID="3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.665507 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0"} err="failed to get container status \"3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0\": rpc error: code = NotFound desc = could not find container \"3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0\": container with ID starting with 3386212ea2a46d84ddd0846c36e08da2c5d8b8dfb5f2a6dfbe28b3b1f2d934f0 not found: ID does not exist" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.665525 4757 scope.go:117] "RemoveContainer" containerID="30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.677396 4757 scope.go:117] "RemoveContainer" containerID="202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.691992 4757 scope.go:117] "RemoveContainer" containerID="5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.706143 4757 scope.go:117] "RemoveContainer" containerID="30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0" Oct 06 13:42:09 crc kubenswrapper[4757]: E1006 13:42:09.706514 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0\": container with ID starting with 30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0 not found: ID does not exist" containerID="30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.706555 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0"} err="failed to get container status \"30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0\": rpc error: code = NotFound desc = could not find container \"30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0\": container with ID starting with 30d54bec74a921f415ecfb4d8ec010f789e6d2c6812a6193ae7b1fa8c5546fe0 not found: ID does not exist" Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.706585 4757 scope.go:117] "RemoveContainer" containerID="202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd" 
Oct 06 13:42:09 crc kubenswrapper[4757]: E1006 13:42:09.706808 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd\": container with ID starting with 202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd not found: ID does not exist" containerID="202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd"
Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.706831 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd"} err="failed to get container status \"202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd\": rpc error: code = NotFound desc = could not find container \"202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd\": container with ID starting with 202abae68bdcaf3404581fffe7dd42aed901ddbb8d609d59b579f023907598cd not found: ID does not exist"
Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.706848 4757 scope.go:117] "RemoveContainer" containerID="5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f"
Oct 06 13:42:09 crc kubenswrapper[4757]: E1006 13:42:09.707210 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f\": container with ID starting with 5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f not found: ID does not exist" containerID="5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f"
Oct 06 13:42:09 crc kubenswrapper[4757]: I1006 13:42:09.707278 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f"} err="failed to get container status \"5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f\": rpc error: code = NotFound desc = could not find container \"5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f\": container with ID starting with 5db16d51b5b0e6082ec1d269772bef8fadd2ebc69728f38100b8d1979573c66f not found: ID does not exist"
Oct 06 13:42:10 crc kubenswrapper[4757]: I1006 13:42:10.190185 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" path="/var/lib/kubelet/pods/58fe806e-025a-4c3f-97d7-b334e810ef16/volumes"
Oct 06 13:42:10 crc kubenswrapper[4757]: I1006 13:42:10.190942 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e388314-efd0-452c-9cca-f8634f501514" path="/var/lib/kubelet/pods/6e388314-efd0-452c-9cca-f8634f501514/volumes"
Oct 06 13:42:13 crc kubenswrapper[4757]: I1006 13:42:13.019208 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-d8wfp"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.025197 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d8wfp"]
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.025737 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-d8wfp" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerName="registry-server" containerID="cri-o://5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f" gracePeriod=2
Oct 06 13:42:14 crc kubenswrapper[4757]: E1006 13:42:14.154825 4757 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9a9e7aa_5ab4_4877_b808_1097b5c27155.slice/crio-5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f.scope\": RecentStats: unable to find data in memory cache]"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.371662 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d8wfp"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.489670 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vpxh\" (UniqueName: \"kubernetes.io/projected/f9a9e7aa-5ab4-4877-b808-1097b5c27155-kube-api-access-2vpxh\") pod \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") "
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.489712 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-utilities\") pod \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") "
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.489748 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-catalog-content\") pod \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\" (UID: \"f9a9e7aa-5ab4-4877-b808-1097b5c27155\") "
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.491057 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-utilities" (OuterVolumeSpecName: "utilities") pod "f9a9e7aa-5ab4-4877-b808-1097b5c27155" (UID: "f9a9e7aa-5ab4-4877-b808-1097b5c27155"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.498410 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a9e7aa-5ab4-4877-b808-1097b5c27155-kube-api-access-2vpxh" (OuterVolumeSpecName: "kube-api-access-2vpxh") pod "f9a9e7aa-5ab4-4877-b808-1097b5c27155" (UID: "f9a9e7aa-5ab4-4877-b808-1097b5c27155"). InnerVolumeSpecName "kube-api-access-2vpxh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.551521 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9a9e7aa-5ab4-4877-b808-1097b5c27155" (UID: "f9a9e7aa-5ab4-4877-b808-1097b5c27155"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.591024 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.591173 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vpxh\" (UniqueName: \"kubernetes.io/projected/f9a9e7aa-5ab4-4877-b808-1097b5c27155-kube-api-access-2vpxh\") on node \"crc\" DevicePath \"\""
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.591209 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9a9e7aa-5ab4-4877-b808-1097b5c27155-utilities\") on node \"crc\" DevicePath \"\""
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.638936 4757 generic.go:334] "Generic (PLEG): container finished" podID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerID="5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f" exitCode=0
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.638995 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8wfp" event={"ID":"f9a9e7aa-5ab4-4877-b808-1097b5c27155","Type":"ContainerDied","Data":"5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f"}
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.639057 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d8wfp"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.639081 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d8wfp" event={"ID":"f9a9e7aa-5ab4-4877-b808-1097b5c27155","Type":"ContainerDied","Data":"d577142a1cec83551904a69309a0f9a4d5c5e36559d588b4cc957b5624d6951e"}
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.639137 4757 scope.go:117] "RemoveContainer" containerID="5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.669022 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d8wfp"]
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.672298 4757 scope.go:117] "RemoveContainer" containerID="2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.679618 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-d8wfp"]
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.698025 4757 scope.go:117] "RemoveContainer" containerID="5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.716563 4757 scope.go:117] "RemoveContainer" containerID="5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f"
Oct 06 13:42:14 crc kubenswrapper[4757]: E1006 13:42:14.717294 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f\": container with ID starting with 5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f not found: ID does not exist" containerID="5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.717350 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f"} err="failed to get container status \"5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f\": rpc error: code = NotFound desc = could not find container \"5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f\": container with ID starting with 5a35be82c5d436c292b43e65099b1671b8bd4348d690d2bf099876a27e8b199f not found: ID does not exist"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.717389 4757 scope.go:117] "RemoveContainer" containerID="2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115"
Oct 06 13:42:14 crc kubenswrapper[4757]: E1006 13:42:14.717717 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115\": container with ID starting with 2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115 not found: ID does not exist" containerID="2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.717755 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115"} err="failed to get container status \"2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115\": rpc error: code = NotFound desc = could not find container \"2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115\": container with ID starting with 2e338616a77d5fc278f3caf94d4e79310d7097fa200ac3848e4041a07a393115 not found: ID does not exist"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.717783 4757 scope.go:117] "RemoveContainer" containerID="5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501"
Oct 06 13:42:14 crc kubenswrapper[4757]: E1006 13:42:14.718378 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501\": container with ID starting with 5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501 not found: ID does not exist" containerID="5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501"
Oct 06 13:42:14 crc kubenswrapper[4757]: I1006 13:42:14.718414 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501"} err="failed to get container status \"5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501\": rpc error: code = NotFound desc = could not find container \"5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501\": container with ID starting with 5e4bba0b49eaa738aededcd7be03312fff450727bb0cd7e3959199e1f601f501 not found: ID does not exist"
Oct 06 13:42:16 crc kubenswrapper[4757]: I1006 13:42:16.188524 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" path="/var/lib/kubelet/pods/f9a9e7aa-5ab4-4877-b808-1097b5c27155/volumes"
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.516975 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" podUID="8db12020-2fce-47b5-936e-e792b08976f0" containerName="oauth-openshift" containerID="cri-o://dbd012bd338afbe873850b01652e418f44073d9581ac20aff8803307728d0617" gracePeriod=15
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.698680 4757 generic.go:334] "Generic (PLEG): container finished" podID="8db12020-2fce-47b5-936e-e792b08976f0" containerID="dbd012bd338afbe873850b01652e418f44073d9581ac20aff8803307728d0617" exitCode=0
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.698732 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" event={"ID":"8db12020-2fce-47b5-936e-e792b08976f0","Type":"ContainerDied","Data":"dbd012bd338afbe873850b01652e418f44073d9581ac20aff8803307728d0617"}
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.900703 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr"
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.934782 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-79bf7467fb-xqzbm"]
Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935069 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerName="extract-utilities"
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935092 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerName="extract-utilities"
Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935123 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerName="registry-server"
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935132 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerName="registry-server"
Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935147 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerName="extract-content"
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935155 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerName="extract-content"
Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935167 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerName="extract-utilities"
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935175 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerName="extract-utilities"
Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935185 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerName="extract-utilities"
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935195 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerName="extract-utilities"
Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935206 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerName="registry-server"
Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935215 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerName="registry-server"
Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935225 4757
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8db12020-2fce-47b5-936e-e792b08976f0" containerName="oauth-openshift" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935233 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8db12020-2fce-47b5-936e-e792b08976f0" containerName="oauth-openshift" Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935244 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerName="extract-content" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935252 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerName="extract-content" Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935261 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerName="extract-content" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935269 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerName="extract-content" Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935278 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e388314-efd0-452c-9cca-f8634f501514" containerName="extract-utilities" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935286 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e388314-efd0-452c-9cca-f8634f501514" containerName="extract-utilities" Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935297 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e388314-efd0-452c-9cca-f8634f501514" containerName="extract-content" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935304 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e388314-efd0-452c-9cca-f8634f501514" containerName="extract-content" Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935314 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e388314-efd0-452c-9cca-f8634f501514" containerName="registry-server" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935321 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e388314-efd0-452c-9cca-f8634f501514" containerName="registry-server" Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935339 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerName="registry-server" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935346 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerName="registry-server" Oct 06 13:42:24 crc kubenswrapper[4757]: E1006 13:42:24.935355 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77dba3de-14f0-4ae5-8fe3-063537d1c5a1" containerName="pruner" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935362 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="77dba3de-14f0-4ae5-8fe3-063537d1c5a1" containerName="pruner" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935472 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="8db12020-2fce-47b5-936e-e792b08976f0" containerName="oauth-openshift" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935486 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="58fe806e-025a-4c3f-97d7-b334e810ef16" containerName="registry-server" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 
13:42:24.935496 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="69959a98-24ab-49d0-b774-a98f5ebe70e3" containerName="registry-server" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935509 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a9e7aa-5ab4-4877-b808-1097b5c27155" containerName="registry-server" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935519 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e388314-efd0-452c-9cca-f8634f501514" containerName="registry-server" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.935527 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="77dba3de-14f0-4ae5-8fe3-063537d1c5a1" containerName="pruner" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.936021 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:24 crc kubenswrapper[4757]: I1006 13:42:24.955497 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-79bf7467fb-xqzbm"] Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.027911 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8db12020-2fce-47b5-936e-e792b08976f0-audit-dir\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028009 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-error\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028039 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-ocp-branding-template\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028024 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8db12020-2fce-47b5-936e-e792b08976f0-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028066 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-service-ca\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028114 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvnrm\" (UniqueName: \"kubernetes.io/projected/8db12020-2fce-47b5-936e-e792b08976f0-kube-api-access-dvnrm\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028149 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-serving-cert\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028172 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-audit-policies\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028210 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-router-certs\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028233 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-cliconfig\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028250 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-session\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028279 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-login\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028333 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-trusted-ca-bundle\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028383 4757 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-idp-0-file-data\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028412 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-provider-selection\") pod \"8db12020-2fce-47b5-936e-e792b08976f0\" (UID: \"8db12020-2fce-47b5-936e-e792b08976f0\") " Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028573 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-router-certs\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028597 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bprrr\" (UniqueName: \"kubernetes.io/projected/6a4712cb-3093-48ac-a85f-e0b06ed40698-kube-api-access-bprrr\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028618 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-session\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028636 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028658 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028685 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-template-error\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 
13:42:25.028713 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-template-login\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028735 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028754 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-serving-cert\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028771 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-audit-policies\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028786 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.028803 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6a4712cb-3093-48ac-a85f-e0b06ed40698-audit-dir\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.029313 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-service-ca\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.029348 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-cliconfig\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " 
pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.029404 4757 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8db12020-2fce-47b5-936e-e792b08976f0-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.031365 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.031839 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.031873 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.031940 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.037040 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8db12020-2fce-47b5-936e-e792b08976f0-kube-api-access-dvnrm" (OuterVolumeSpecName: "kube-api-access-dvnrm") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "kube-api-access-dvnrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.048505 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.051636 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.055298 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.055478 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.055713 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.055983 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.056281 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.056379 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "8db12020-2fce-47b5-936e-e792b08976f0" (UID: "8db12020-2fce-47b5-936e-e792b08976f0"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.131160 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-session\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.131242 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.131269 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-template-error\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.131297 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.132137 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-template-login\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.132780 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.132856 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-serving-cert\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.132885 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: 
\"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133034 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-audit-policies\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133066 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6a4712cb-3093-48ac-a85f-e0b06ed40698-audit-dir\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133106 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-service-ca\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133138 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-cliconfig\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133212 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-router-certs\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133235 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bprrr\" (UniqueName: \"kubernetes.io/projected/6a4712cb-3093-48ac-a85f-e0b06ed40698-kube-api-access-bprrr\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133285 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvnrm\" (UniqueName: \"kubernetes.io/projected/8db12020-2fce-47b5-936e-e792b08976f0-kube-api-access-dvnrm\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133297 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133309 4757 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc 
kubenswrapper[4757]: I1006 13:42:25.133324 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133337 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133348 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133361 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133372 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133385 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133398 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133410 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133423 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133435 4757 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/8db12020-2fce-47b5-936e-e792b08976f0-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.133987 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-audit-policies\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.134041 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.134071 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6a4712cb-3093-48ac-a85f-e0b06ed40698-audit-dir\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.134508 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-service-ca\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.135356 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-cliconfig\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.135945 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.136014 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-session\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.136655 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-template-error\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.136765 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-template-login\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.137058 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.137478 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-router-certs\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.140003 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-serving-cert\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.141490 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6a4712cb-3093-48ac-a85f-e0b06ed40698-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.152208 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bprrr\" (UniqueName: \"kubernetes.io/projected/6a4712cb-3093-48ac-a85f-e0b06ed40698-kube-api-access-bprrr\") pod \"oauth-openshift-79bf7467fb-xqzbm\" (UID: \"6a4712cb-3093-48ac-a85f-e0b06ed40698\") " pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.248646 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.706469 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" event={"ID":"8db12020-2fce-47b5-936e-e792b08976f0","Type":"ContainerDied","Data":"f0a46bdc7f73d5935d4a725d0f8aeec87c27d59eca17f9f84a07fbd8c48f2579"} Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.706533 4757 scope.go:117] "RemoveContainer" containerID="dbd012bd338afbe873850b01652e418f44073d9581ac20aff8803307728d0617" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.706541 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zcwgr" Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.716157 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-79bf7467fb-xqzbm"] Oct 06 13:42:25 crc kubenswrapper[4757]: W1006 13:42:25.724761 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a4712cb_3093_48ac_a85f_e0b06ed40698.slice/crio-c5b55d035311f4f96481cb953ca83a6a78c4032dc4bab263189bc6abb0b8b333 WatchSource:0}: Error finding container c5b55d035311f4f96481cb953ca83a6a78c4032dc4bab263189bc6abb0b8b333: Status 404 returned error can't find the container with id c5b55d035311f4f96481cb953ca83a6a78c4032dc4bab263189bc6abb0b8b333 Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.761328 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zcwgr"] Oct 06 13:42:25 crc kubenswrapper[4757]: I1006 13:42:25.763997 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zcwgr"] Oct 06 13:42:26 crc kubenswrapper[4757]: I1006 13:42:26.188362 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8db12020-2fce-47b5-936e-e792b08976f0" path="/var/lib/kubelet/pods/8db12020-2fce-47b5-936e-e792b08976f0/volumes" Oct 06 13:42:26 crc kubenswrapper[4757]: I1006 13:42:26.719003 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" event={"ID":"6a4712cb-3093-48ac-a85f-e0b06ed40698","Type":"ContainerStarted","Data":"bf6365d6a154b5d7cd18bcb00b5e3a38f173aa22e919cb2ea63303fc40bb8b79"} Oct 06 13:42:26 crc kubenswrapper[4757]: I1006 13:42:26.719090 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" event={"ID":"6a4712cb-3093-48ac-a85f-e0b06ed40698","Type":"ContainerStarted","Data":"c5b55d035311f4f96481cb953ca83a6a78c4032dc4bab263189bc6abb0b8b333"} Oct 06 13:42:26 crc kubenswrapper[4757]: I1006 13:42:26.720463 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:26 crc kubenswrapper[4757]: I1006 13:42:26.730652 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" Oct 06 13:42:26 crc kubenswrapper[4757]: I1006 13:42:26.758033 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-79bf7467fb-xqzbm" podStartSLOduration=27.758002262 podStartE2EDuration="27.758002262s" podCreationTimestamp="2025-10-06 13:41:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:42:26.750017912 +0000 UTC m=+235.247336469" watchObservedRunningTime="2025-10-06 13:42:26.758002262 +0000 UTC m=+235.255320809" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.564343 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h5mpv"] Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.565338 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h5mpv" podUID="901fbe7e-0856-4210-b723-30952e1fe25d" containerName="registry-server" 
containerID="cri-o://90e98178792f3724f2eed442599a0c83eea13f255ca9bd3f4423934a1852fdd8" gracePeriod=30 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.575483 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bdfrj"] Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.575779 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bdfrj" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerName="registry-server" containerID="cri-o://0cbb512d91779e6096229e64f651a8715c19c59907badb660491d05178a622e9" gracePeriod=30 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.582557 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-f5469"] Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.582775 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" podUID="b9804fd5-588e-4638-b1ff-f815e7b5f834" containerName="marketplace-operator" containerID="cri-o://6d672c41786b2ffeaddf158767358df22a70f039022d89e4a01676f45ce870ab" gracePeriod=30 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.592132 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5m42j"] Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.592434 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5m42j" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerName="registry-server" containerID="cri-o://5ebca2af7e8005869eafa12b26cf776eb99af35ae15950b64581581bbd786217" gracePeriod=30 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.601080 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xrfzp"] Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.601396 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xrfzp" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerName="registry-server" containerID="cri-o://2019012ee5ec079e0f7f17e057ea96e9b0d50bfd61e15c088b415b7138e09f90" gracePeriod=30 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.607210 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gfrpz"] Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.607919 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.658578 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gfrpz"] Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.716764 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3c7cc53d-90b4-4fc3-8993-2648eb34abf2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gfrpz\" (UID: \"3c7cc53d-90b4-4fc3-8993-2648eb34abf2\") " pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.717222 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3c7cc53d-90b4-4fc3-8993-2648eb34abf2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gfrpz\" (UID: \"3c7cc53d-90b4-4fc3-8993-2648eb34abf2\") " pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.717246 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqgh9\" (UniqueName: \"kubernetes.io/projected/3c7cc53d-90b4-4fc3-8993-2648eb34abf2-kube-api-access-wqgh9\") pod \"marketplace-operator-79b997595-gfrpz\" (UID: \"3c7cc53d-90b4-4fc3-8993-2648eb34abf2\") " pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.818820 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3c7cc53d-90b4-4fc3-8993-2648eb34abf2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gfrpz\" (UID: \"3c7cc53d-90b4-4fc3-8993-2648eb34abf2\") " pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.818877 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3c7cc53d-90b4-4fc3-8993-2648eb34abf2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gfrpz\" (UID: \"3c7cc53d-90b4-4fc3-8993-2648eb34abf2\") " pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.818900 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqgh9\" (UniqueName: \"kubernetes.io/projected/3c7cc53d-90b4-4fc3-8993-2648eb34abf2-kube-api-access-wqgh9\") pod \"marketplace-operator-79b997595-gfrpz\" (UID: \"3c7cc53d-90b4-4fc3-8993-2648eb34abf2\") " pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.820306 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3c7cc53d-90b4-4fc3-8993-2648eb34abf2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gfrpz\" (UID: \"3c7cc53d-90b4-4fc3-8993-2648eb34abf2\") " pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.829200 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/3c7cc53d-90b4-4fc3-8993-2648eb34abf2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gfrpz\" (UID: \"3c7cc53d-90b4-4fc3-8993-2648eb34abf2\") " pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.835919 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqgh9\" (UniqueName: \"kubernetes.io/projected/3c7cc53d-90b4-4fc3-8993-2648eb34abf2-kube-api-access-wqgh9\") pod \"marketplace-operator-79b997595-gfrpz\" (UID: \"3c7cc53d-90b4-4fc3-8993-2648eb34abf2\") " pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.854816 4757 generic.go:334] "Generic (PLEG): container finished" podID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerID="5ebca2af7e8005869eafa12b26cf776eb99af35ae15950b64581581bbd786217" exitCode=0 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.854878 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5m42j" event={"ID":"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7","Type":"ContainerDied","Data":"5ebca2af7e8005869eafa12b26cf776eb99af35ae15950b64581581bbd786217"} Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.857387 4757 generic.go:334] "Generic (PLEG): container finished" podID="901fbe7e-0856-4210-b723-30952e1fe25d" containerID="90e98178792f3724f2eed442599a0c83eea13f255ca9bd3f4423934a1852fdd8" exitCode=0 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.857463 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5mpv" event={"ID":"901fbe7e-0856-4210-b723-30952e1fe25d","Type":"ContainerDied","Data":"90e98178792f3724f2eed442599a0c83eea13f255ca9bd3f4423934a1852fdd8"} Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.861709 4757 generic.go:334] "Generic (PLEG): container finished" podID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerID="2019012ee5ec079e0f7f17e057ea96e9b0d50bfd61e15c088b415b7138e09f90" exitCode=0 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.861746 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrfzp" event={"ID":"06706805-f5bd-459b-82c8-01bec4aab7ea","Type":"ContainerDied","Data":"2019012ee5ec079e0f7f17e057ea96e9b0d50bfd61e15c088b415b7138e09f90"} Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.863349 4757 generic.go:334] "Generic (PLEG): container finished" podID="b9804fd5-588e-4638-b1ff-f815e7b5f834" containerID="6d672c41786b2ffeaddf158767358df22a70f039022d89e4a01676f45ce870ab" exitCode=0 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.863405 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" event={"ID":"b9804fd5-588e-4638-b1ff-f815e7b5f834","Type":"ContainerDied","Data":"6d672c41786b2ffeaddf158767358df22a70f039022d89e4a01676f45ce870ab"} Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.865180 4757 generic.go:334] "Generic (PLEG): container finished" podID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerID="0cbb512d91779e6096229e64f651a8715c19c59907badb660491d05178a622e9" exitCode=0 Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.865214 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdfrj" 
event={"ID":"ab36484b-2956-4a35-8379-6b1fc3ffca49","Type":"ContainerDied","Data":"0cbb512d91779e6096229e64f651a8715c19c59907badb660491d05178a622e9"} Oct 06 13:42:52 crc kubenswrapper[4757]: I1006 13:42:52.929860 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.162832 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.324472 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-catalog-content\") pod \"901fbe7e-0856-4210-b723-30952e1fe25d\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.324766 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-utilities\") pod \"901fbe7e-0856-4210-b723-30952e1fe25d\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.324844 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktf9k\" (UniqueName: \"kubernetes.io/projected/901fbe7e-0856-4210-b723-30952e1fe25d-kube-api-access-ktf9k\") pod \"901fbe7e-0856-4210-b723-30952e1fe25d\" (UID: \"901fbe7e-0856-4210-b723-30952e1fe25d\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.326380 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-utilities" (OuterVolumeSpecName: "utilities") pod "901fbe7e-0856-4210-b723-30952e1fe25d" (UID: "901fbe7e-0856-4210-b723-30952e1fe25d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.328758 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/901fbe7e-0856-4210-b723-30952e1fe25d-kube-api-access-ktf9k" (OuterVolumeSpecName: "kube-api-access-ktf9k") pod "901fbe7e-0856-4210-b723-30952e1fe25d" (UID: "901fbe7e-0856-4210-b723-30952e1fe25d"). InnerVolumeSpecName "kube-api-access-ktf9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.355696 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.362993 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.367468 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bdfrj" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.415060 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "901fbe7e-0856-4210-b723-30952e1fe25d" (UID: "901fbe7e-0856-4210-b723-30952e1fe25d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.427706 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zcwv\" (UniqueName: \"kubernetes.io/projected/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-kube-api-access-7zcwv\") pod \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.427858 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-catalog-content\") pod \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.427899 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-utilities\") pod \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\" (UID: \"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.428156 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.428175 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/901fbe7e-0856-4210-b723-30952e1fe25d-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.428185 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktf9k\" (UniqueName: \"kubernetes.io/projected/901fbe7e-0856-4210-b723-30952e1fe25d-kube-api-access-ktf9k\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.429246 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-utilities" (OuterVolumeSpecName: "utilities") pod "d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" (UID: "d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.431608 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-kube-api-access-7zcwv" (OuterVolumeSpecName: "kube-api-access-7zcwv") pod "d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" (UID: "d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7"). InnerVolumeSpecName "kube-api-access-7zcwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.442146 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" (UID: "d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.480807 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gfrpz"] Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.528926 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpl7n\" (UniqueName: \"kubernetes.io/projected/ab36484b-2956-4a35-8379-6b1fc3ffca49-kube-api-access-fpl7n\") pod \"ab36484b-2956-4a35-8379-6b1fc3ffca49\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.529076 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-catalog-content\") pod \"06706805-f5bd-459b-82c8-01bec4aab7ea\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.529196 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-utilities\") pod \"06706805-f5bd-459b-82c8-01bec4aab7ea\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.529272 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-catalog-content\") pod \"ab36484b-2956-4a35-8379-6b1fc3ffca49\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.529305 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-utilities\") pod \"ab36484b-2956-4a35-8379-6b1fc3ffca49\" (UID: \"ab36484b-2956-4a35-8379-6b1fc3ffca49\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.529329 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8fpw\" (UniqueName: \"kubernetes.io/projected/06706805-f5bd-459b-82c8-01bec4aab7ea-kube-api-access-k8fpw\") pod \"06706805-f5bd-459b-82c8-01bec4aab7ea\" (UID: \"06706805-f5bd-459b-82c8-01bec4aab7ea\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.529603 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.529619 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.529631 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zcwv\" (UniqueName: \"kubernetes.io/projected/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7-kube-api-access-7zcwv\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.530226 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-utilities" (OuterVolumeSpecName: "utilities") pod "ab36484b-2956-4a35-8379-6b1fc3ffca49" (UID: "ab36484b-2956-4a35-8379-6b1fc3ffca49"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.530861 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-utilities" (OuterVolumeSpecName: "utilities") pod "06706805-f5bd-459b-82c8-01bec4aab7ea" (UID: "06706805-f5bd-459b-82c8-01bec4aab7ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.532721 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab36484b-2956-4a35-8379-6b1fc3ffca49-kube-api-access-fpl7n" (OuterVolumeSpecName: "kube-api-access-fpl7n") pod "ab36484b-2956-4a35-8379-6b1fc3ffca49" (UID: "ab36484b-2956-4a35-8379-6b1fc3ffca49"). InnerVolumeSpecName "kube-api-access-fpl7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.532965 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06706805-f5bd-459b-82c8-01bec4aab7ea-kube-api-access-k8fpw" (OuterVolumeSpecName: "kube-api-access-k8fpw") pod "06706805-f5bd-459b-82c8-01bec4aab7ea" (UID: "06706805-f5bd-459b-82c8-01bec4aab7ea"). InnerVolumeSpecName "kube-api-access-k8fpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.579738 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.630607 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpl7n\" (UniqueName: \"kubernetes.io/projected/ab36484b-2956-4a35-8379-6b1fc3ffca49-kube-api-access-fpl7n\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.630646 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.630659 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.630667 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8fpw\" (UniqueName: \"kubernetes.io/projected/06706805-f5bd-459b-82c8-01bec4aab7ea-kube-api-access-k8fpw\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.681908 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab36484b-2956-4a35-8379-6b1fc3ffca49" (UID: "ab36484b-2956-4a35-8379-6b1fc3ffca49"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.726061 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06706805-f5bd-459b-82c8-01bec4aab7ea" (UID: "06706805-f5bd-459b-82c8-01bec4aab7ea"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.731399 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-trusted-ca\") pod \"b9804fd5-588e-4638-b1ff-f815e7b5f834\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.731655 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-operator-metrics\") pod \"b9804fd5-588e-4638-b1ff-f815e7b5f834\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.732401 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2rtm\" (UniqueName: \"kubernetes.io/projected/b9804fd5-588e-4638-b1ff-f815e7b5f834-kube-api-access-s2rtm\") pod \"b9804fd5-588e-4638-b1ff-f815e7b5f834\" (UID: \"b9804fd5-588e-4638-b1ff-f815e7b5f834\") " Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.732610 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b9804fd5-588e-4638-b1ff-f815e7b5f834" (UID: "b9804fd5-588e-4638-b1ff-f815e7b5f834"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.732673 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06706805-f5bd-459b-82c8-01bec4aab7ea-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.732781 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab36484b-2956-4a35-8379-6b1fc3ffca49-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.737221 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9804fd5-588e-4638-b1ff-f815e7b5f834-kube-api-access-s2rtm" (OuterVolumeSpecName: "kube-api-access-s2rtm") pod "b9804fd5-588e-4638-b1ff-f815e7b5f834" (UID: "b9804fd5-588e-4638-b1ff-f815e7b5f834"). InnerVolumeSpecName "kube-api-access-s2rtm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.737580 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b9804fd5-588e-4638-b1ff-f815e7b5f834" (UID: "b9804fd5-588e-4638-b1ff-f815e7b5f834"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.834193 4757 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.834239 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2rtm\" (UniqueName: \"kubernetes.io/projected/b9804fd5-588e-4638-b1ff-f815e7b5f834-kube-api-access-s2rtm\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.834253 4757 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b9804fd5-588e-4638-b1ff-f815e7b5f834-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.879388 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5m42j" event={"ID":"d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7","Type":"ContainerDied","Data":"7e6b2992c6d1893e8c6ffdd7e390bf5a3704471f08457015f24f9a3f24782d90"} Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.879441 4757 scope.go:117] "RemoveContainer" containerID="5ebca2af7e8005869eafa12b26cf776eb99af35ae15950b64581581bbd786217" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.879590 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5m42j" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.891470 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h5mpv" event={"ID":"901fbe7e-0856-4210-b723-30952e1fe25d","Type":"ContainerDied","Data":"d4a20230271817d5cd81614abc4eaba4391d6c4f9f924ccda4d0b1a7cef6aa80"} Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.891489 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h5mpv" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.894220 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xrfzp" event={"ID":"06706805-f5bd-459b-82c8-01bec4aab7ea","Type":"ContainerDied","Data":"d695433f45a8cf8e6905870dba270d0511af9b1f5fa329bf075ddce3a7984fd1"} Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.894272 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xrfzp" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.895750 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" event={"ID":"b9804fd5-588e-4638-b1ff-f815e7b5f834","Type":"ContainerDied","Data":"b59d41d25f8b0c581e3101c892da4615dd7e1327cf07d841759d5c3c5a98feaf"} Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.895771 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-f5469" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.898851 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bdfrj" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.899636 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bdfrj" event={"ID":"ab36484b-2956-4a35-8379-6b1fc3ffca49","Type":"ContainerDied","Data":"1b0e350f3bf923bfde15d553339b8e79de047f7549ac54d742d31453d7329bc5"} Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.900964 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" event={"ID":"3c7cc53d-90b4-4fc3-8993-2648eb34abf2","Type":"ContainerStarted","Data":"afbdc131777ea0234163eacb5ecc95f3a712073d002f40d712455365cc9d5abe"} Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.901095 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" event={"ID":"3c7cc53d-90b4-4fc3-8993-2648eb34abf2","Type":"ContainerStarted","Data":"e92b1c4e905027d5ea0de856cc14c45ca34e51dfc401917c4d6767effec99356"} Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.901326 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.902773 4757 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gfrpz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" start-of-body= Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.902844 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" podUID="3c7cc53d-90b4-4fc3-8993-2648eb34abf2" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.55:8080/healthz\": dial tcp 10.217.0.55:8080: connect: connection refused" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.908683 4757 scope.go:117] "RemoveContainer" containerID="525aa25baa48255d37b237a1c51fdc6630b3298430a715a3ec2f32459329a6c3" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.928468 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" podStartSLOduration=1.928447547 podStartE2EDuration="1.928447547s" podCreationTimestamp="2025-10-06 13:42:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:42:53.925262211 +0000 UTC m=+262.422580758" watchObservedRunningTime="2025-10-06 13:42:53.928447547 +0000 UTC m=+262.425766094" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.934295 4757 scope.go:117] "RemoveContainer" containerID="67edf6601fa8e2737de5075910234803fc55ed9b8866b5c37ebf5303cc3fc446" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.949811 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5m42j"] Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.959478 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5m42j"] Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.966002 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h5mpv"] Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.967360 4757 
scope.go:117] "RemoveContainer" containerID="90e98178792f3724f2eed442599a0c83eea13f255ca9bd3f4423934a1852fdd8" Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.972946 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h5mpv"] Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.982032 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-f5469"] Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.985317 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-f5469"] Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.996483 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bdfrj"] Oct 06 13:42:53 crc kubenswrapper[4757]: I1006 13:42:53.997198 4757 scope.go:117] "RemoveContainer" containerID="d3f9dfec115943bc6f5430f9b10277bbc8eab2422af1ff0b849df29c02640124" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.008940 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bdfrj"] Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.012356 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xrfzp"] Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.017280 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xrfzp"] Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.017398 4757 scope.go:117] "RemoveContainer" containerID="af7a4474fb44ba23925187c4320bbf38aff8eb46dc7be1ee2f764ee24cffc76e" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.040941 4757 scope.go:117] "RemoveContainer" containerID="2019012ee5ec079e0f7f17e057ea96e9b0d50bfd61e15c088b415b7138e09f90" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.058875 4757 scope.go:117] "RemoveContainer" containerID="8b22e7dcd2126a007a106b33a10e96d5ec029f1461ee130daea1830e20164e08" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.075621 4757 scope.go:117] "RemoveContainer" containerID="dabda7f2477df34bb2918c0bbfb3a96aaaa209f0c3a20fc039c654a8b35605a4" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.089300 4757 scope.go:117] "RemoveContainer" containerID="6d672c41786b2ffeaddf158767358df22a70f039022d89e4a01676f45ce870ab" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.102400 4757 scope.go:117] "RemoveContainer" containerID="0cbb512d91779e6096229e64f651a8715c19c59907badb660491d05178a622e9" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.117817 4757 scope.go:117] "RemoveContainer" containerID="e8e3ec44640774d1a04d895be69915b0002c94e1203d16f57bb58dd2377c9e9f" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.133570 4757 scope.go:117] "RemoveContainer" containerID="edad05f0c30cde4046294ceeae626b7c2072ffcbc83d031bdc5e0e92610ad0ec" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.186230 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" path="/var/lib/kubelet/pods/06706805-f5bd-459b-82c8-01bec4aab7ea/volumes" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.187247 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="901fbe7e-0856-4210-b723-30952e1fe25d" path="/var/lib/kubelet/pods/901fbe7e-0856-4210-b723-30952e1fe25d/volumes" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.188192 4757 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49" path="/var/lib/kubelet/pods/ab36484b-2956-4a35-8379-6b1fc3ffca49/volumes" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.189812 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9804fd5-588e-4638-b1ff-f815e7b5f834" path="/var/lib/kubelet/pods/b9804fd5-588e-4638-b1ff-f815e7b5f834/volumes" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.190482 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" path="/var/lib/kubelet/pods/d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7/volumes" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.783903 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hfhqn"] Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784450 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerName="extract-utilities" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784463 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerName="extract-utilities" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784475 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9804fd5-588e-4638-b1ff-f815e7b5f834" containerName="marketplace-operator" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784483 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9804fd5-588e-4638-b1ff-f815e7b5f834" containerName="marketplace-operator" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784493 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerName="extract-content" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784500 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerName="extract-content" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784508 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerName="extract-content" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784514 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerName="extract-content" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784520 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784526 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784533 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="901fbe7e-0856-4210-b723-30952e1fe25d" containerName="extract-utilities" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784538 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="901fbe7e-0856-4210-b723-30952e1fe25d" containerName="extract-utilities" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784544 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="901fbe7e-0856-4210-b723-30952e1fe25d" containerName="extract-content" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784550 4757 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="901fbe7e-0856-4210-b723-30952e1fe25d" containerName="extract-content" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784560 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784566 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784573 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerName="extract-utilities" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784579 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerName="extract-utilities" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784588 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerName="extract-content" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784596 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerName="extract-content" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784610 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="901fbe7e-0856-4210-b723-30952e1fe25d" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784654 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="901fbe7e-0856-4210-b723-30952e1fe25d" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784664 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerName="extract-utilities" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784671 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerName="extract-utilities" Oct 06 13:42:54 crc kubenswrapper[4757]: E1006 13:42:54.784677 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.784683 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.785380 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2a35dbd-b700-4111-a6c4-eb7dc42cfbe7" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.785411 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9804fd5-588e-4638-b1ff-f815e7b5f834" containerName="marketplace-operator" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.785424 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="06706805-f5bd-459b-82c8-01bec4aab7ea" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.785435 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab36484b-2956-4a35-8379-6b1fc3ffca49" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.785453 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="901fbe7e-0856-4210-b723-30952e1fe25d" containerName="registry-server" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.790427 
4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.795232 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.810795 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hfhqn"] Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.913453 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-gfrpz" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.948140 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8cjz\" (UniqueName: \"kubernetes.io/projected/2803bf75-1028-41a4-9533-e4ec37c0872a-kube-api-access-x8cjz\") pod \"redhat-marketplace-hfhqn\" (UID: \"2803bf75-1028-41a4-9533-e4ec37c0872a\") " pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.948247 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2803bf75-1028-41a4-9533-e4ec37c0872a-catalog-content\") pod \"redhat-marketplace-hfhqn\" (UID: \"2803bf75-1028-41a4-9533-e4ec37c0872a\") " pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.948299 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2803bf75-1028-41a4-9533-e4ec37c0872a-utilities\") pod \"redhat-marketplace-hfhqn\" (UID: \"2803bf75-1028-41a4-9533-e4ec37c0872a\") " pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.984003 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rlx44"] Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.985238 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.987918 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 06 13:42:54 crc kubenswrapper[4757]: I1006 13:42:54.997847 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rlx44"] Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.049817 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8cjz\" (UniqueName: \"kubernetes.io/projected/2803bf75-1028-41a4-9533-e4ec37c0872a-kube-api-access-x8cjz\") pod \"redhat-marketplace-hfhqn\" (UID: \"2803bf75-1028-41a4-9533-e4ec37c0872a\") " pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.049997 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2803bf75-1028-41a4-9533-e4ec37c0872a-catalog-content\") pod \"redhat-marketplace-hfhqn\" (UID: \"2803bf75-1028-41a4-9533-e4ec37c0872a\") " pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.050056 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2803bf75-1028-41a4-9533-e4ec37c0872a-utilities\") pod \"redhat-marketplace-hfhqn\" (UID: \"2803bf75-1028-41a4-9533-e4ec37c0872a\") " pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.050755 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2803bf75-1028-41a4-9533-e4ec37c0872a-catalog-content\") pod \"redhat-marketplace-hfhqn\" (UID: \"2803bf75-1028-41a4-9533-e4ec37c0872a\") " pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.050841 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2803bf75-1028-41a4-9533-e4ec37c0872a-utilities\") pod \"redhat-marketplace-hfhqn\" (UID: \"2803bf75-1028-41a4-9533-e4ec37c0872a\") " pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.070798 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8cjz\" (UniqueName: \"kubernetes.io/projected/2803bf75-1028-41a4-9533-e4ec37c0872a-kube-api-access-x8cjz\") pod \"redhat-marketplace-hfhqn\" (UID: \"2803bf75-1028-41a4-9533-e4ec37c0872a\") " pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.120916 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.151195 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8g9b\" (UniqueName: \"kubernetes.io/projected/4251acbc-50b9-4729-8fac-0186245abb51-kube-api-access-s8g9b\") pod \"redhat-operators-rlx44\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.151273 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-catalog-content\") pod \"redhat-operators-rlx44\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.151358 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-utilities\") pod \"redhat-operators-rlx44\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.252913 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-catalog-content\") pod \"redhat-operators-rlx44\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.253289 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-utilities\") pod \"redhat-operators-rlx44\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.253319 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8g9b\" (UniqueName: \"kubernetes.io/projected/4251acbc-50b9-4729-8fac-0186245abb51-kube-api-access-s8g9b\") pod \"redhat-operators-rlx44\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.253404 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-catalog-content\") pod \"redhat-operators-rlx44\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.253679 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-utilities\") pod \"redhat-operators-rlx44\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.273699 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8g9b\" (UniqueName: \"kubernetes.io/projected/4251acbc-50b9-4729-8fac-0186245abb51-kube-api-access-s8g9b\") pod \"redhat-operators-rlx44\" (UID: 
\"4251acbc-50b9-4729-8fac-0186245abb51\") " pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.309032 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.501784 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hfhqn"] Oct 06 13:42:55 crc kubenswrapper[4757]: W1006 13:42:55.509228 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2803bf75_1028_41a4_9533_e4ec37c0872a.slice/crio-a4af42da708f98a3e84155fcd6c638f29719c00f51d8bfa8c0b657f07e7758c0 WatchSource:0}: Error finding container a4af42da708f98a3e84155fcd6c638f29719c00f51d8bfa8c0b657f07e7758c0: Status 404 returned error can't find the container with id a4af42da708f98a3e84155fcd6c638f29719c00f51d8bfa8c0b657f07e7758c0 Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.770946 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rlx44"] Oct 06 13:42:55 crc kubenswrapper[4757]: W1006 13:42:55.781480 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4251acbc_50b9_4729_8fac_0186245abb51.slice/crio-7a4ed93af7fd934a3a0380638fdb5e1bd3396daa31794e646139b6cbf163e306 WatchSource:0}: Error finding container 7a4ed93af7fd934a3a0380638fdb5e1bd3396daa31794e646139b6cbf163e306: Status 404 returned error can't find the container with id 7a4ed93af7fd934a3a0380638fdb5e1bd3396daa31794e646139b6cbf163e306 Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.915973 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlx44" event={"ID":"4251acbc-50b9-4729-8fac-0186245abb51","Type":"ContainerStarted","Data":"7a4ed93af7fd934a3a0380638fdb5e1bd3396daa31794e646139b6cbf163e306"} Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.917951 4757 generic.go:334] "Generic (PLEG): container finished" podID="2803bf75-1028-41a4-9533-e4ec37c0872a" containerID="bda22651a3a7e43c7262d89b180ddf6aae18bc0423724cf03c2f5b6ef6f066b5" exitCode=0 Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.918009 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hfhqn" event={"ID":"2803bf75-1028-41a4-9533-e4ec37c0872a","Type":"ContainerDied","Data":"bda22651a3a7e43c7262d89b180ddf6aae18bc0423724cf03c2f5b6ef6f066b5"} Oct 06 13:42:55 crc kubenswrapper[4757]: I1006 13:42:55.918044 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hfhqn" event={"ID":"2803bf75-1028-41a4-9533-e4ec37c0872a","Type":"ContainerStarted","Data":"a4af42da708f98a3e84155fcd6c638f29719c00f51d8bfa8c0b657f07e7758c0"} Oct 06 13:42:56 crc kubenswrapper[4757]: I1006 13:42:56.925648 4757 generic.go:334] "Generic (PLEG): container finished" podID="4251acbc-50b9-4729-8fac-0186245abb51" containerID="abb56cdc9179190c6aa1b651ebe800e9b25f37e3914a3fe483900865f9904ee2" exitCode=0 Oct 06 13:42:56 crc kubenswrapper[4757]: I1006 13:42:56.925731 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlx44" event={"ID":"4251acbc-50b9-4729-8fac-0186245abb51","Type":"ContainerDied","Data":"abb56cdc9179190c6aa1b651ebe800e9b25f37e3914a3fe483900865f9904ee2"} Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 
13:42:57.182960 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lmdhq"] Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.184384 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.189186 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.193867 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lmdhq"] Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.278847 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-utilities\") pod \"certified-operators-lmdhq\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.279023 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-catalog-content\") pod \"certified-operators-lmdhq\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.279083 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq45n\" (UniqueName: \"kubernetes.io/projected/aee16345-631c-48f8-a83b-bd30a9c62c60-kube-api-access-nq45n\") pod \"certified-operators-lmdhq\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.383278 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6hsbt"] Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.384197 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.385279 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-catalog-content\") pod \"certified-operators-lmdhq\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.385446 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq45n\" (UniqueName: \"kubernetes.io/projected/aee16345-631c-48f8-a83b-bd30a9c62c60-kube-api-access-nq45n\") pod \"certified-operators-lmdhq\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.385809 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-catalog-content\") pod \"certified-operators-lmdhq\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.386224 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-utilities\") pod \"certified-operators-lmdhq\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.387322 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.389159 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-utilities\") pod \"certified-operators-lmdhq\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.395440 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6hsbt"] Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.409949 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq45n\" (UniqueName: \"kubernetes.io/projected/aee16345-631c-48f8-a83b-bd30a9c62c60-kube-api-access-nq45n\") pod \"certified-operators-lmdhq\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.497402 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055ac91e-78ce-47e5-91b2-cca82eb82bee-catalog-content\") pod \"community-operators-6hsbt\" (UID: \"055ac91e-78ce-47e5-91b2-cca82eb82bee\") " pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.497461 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055ac91e-78ce-47e5-91b2-cca82eb82bee-utilities\") pod \"community-operators-6hsbt\" (UID: 
\"055ac91e-78ce-47e5-91b2-cca82eb82bee\") " pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.497614 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m8td\" (UniqueName: \"kubernetes.io/projected/055ac91e-78ce-47e5-91b2-cca82eb82bee-kube-api-access-6m8td\") pod \"community-operators-6hsbt\" (UID: \"055ac91e-78ce-47e5-91b2-cca82eb82bee\") " pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.539694 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.599397 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m8td\" (UniqueName: \"kubernetes.io/projected/055ac91e-78ce-47e5-91b2-cca82eb82bee-kube-api-access-6m8td\") pod \"community-operators-6hsbt\" (UID: \"055ac91e-78ce-47e5-91b2-cca82eb82bee\") " pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.599786 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055ac91e-78ce-47e5-91b2-cca82eb82bee-catalog-content\") pod \"community-operators-6hsbt\" (UID: \"055ac91e-78ce-47e5-91b2-cca82eb82bee\") " pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.599814 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055ac91e-78ce-47e5-91b2-cca82eb82bee-utilities\") pod \"community-operators-6hsbt\" (UID: \"055ac91e-78ce-47e5-91b2-cca82eb82bee\") " pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.600591 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055ac91e-78ce-47e5-91b2-cca82eb82bee-catalog-content\") pod \"community-operators-6hsbt\" (UID: \"055ac91e-78ce-47e5-91b2-cca82eb82bee\") " pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.600679 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055ac91e-78ce-47e5-91b2-cca82eb82bee-utilities\") pod \"community-operators-6hsbt\" (UID: \"055ac91e-78ce-47e5-91b2-cca82eb82bee\") " pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.630310 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m8td\" (UniqueName: \"kubernetes.io/projected/055ac91e-78ce-47e5-91b2-cca82eb82bee-kube-api-access-6m8td\") pod \"community-operators-6hsbt\" (UID: \"055ac91e-78ce-47e5-91b2-cca82eb82bee\") " pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.710839 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.932463 4757 generic.go:334] "Generic (PLEG): container finished" podID="2803bf75-1028-41a4-9533-e4ec37c0872a" containerID="1c15984bf593ab6c6c09c711e2465242a088b82c0ff1d922662dfa58cb0ec2cd" exitCode=0 Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.932575 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hfhqn" event={"ID":"2803bf75-1028-41a4-9533-e4ec37c0872a","Type":"ContainerDied","Data":"1c15984bf593ab6c6c09c711e2465242a088b82c0ff1d922662dfa58cb0ec2cd"} Oct 06 13:42:57 crc kubenswrapper[4757]: I1006 13:42:57.951714 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lmdhq"] Oct 06 13:42:57 crc kubenswrapper[4757]: W1006 13:42:57.959380 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaee16345_631c_48f8_a83b_bd30a9c62c60.slice/crio-8185f3bbd3cd54bcb64ad1907644ae49c0c8d3249cfd78824980d96055eb8f0f WatchSource:0}: Error finding container 8185f3bbd3cd54bcb64ad1907644ae49c0c8d3249cfd78824980d96055eb8f0f: Status 404 returned error can't find the container with id 8185f3bbd3cd54bcb64ad1907644ae49c0c8d3249cfd78824980d96055eb8f0f Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.103751 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6hsbt"] Oct 06 13:42:58 crc kubenswrapper[4757]: W1006 13:42:58.163984 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod055ac91e_78ce_47e5_91b2_cca82eb82bee.slice/crio-e4ff590367434c2574eb8434a05b88edeb941ad374ff97d7b8c62932f196c7e2 WatchSource:0}: Error finding container e4ff590367434c2574eb8434a05b88edeb941ad374ff97d7b8c62932f196c7e2: Status 404 returned error can't find the container with id e4ff590367434c2574eb8434a05b88edeb941ad374ff97d7b8c62932f196c7e2 Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.941434 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hfhqn" event={"ID":"2803bf75-1028-41a4-9533-e4ec37c0872a","Type":"ContainerStarted","Data":"b0c2e39c78a4f91313cc8535e8270258e12699a905febd4d7906aad35ad808b0"} Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.944458 4757 generic.go:334] "Generic (PLEG): container finished" podID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerID="2d2e97283821c4be4ac239d7aceb1642d11c3ca2a6c916e5463fc4c8e30a89ab" exitCode=0 Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.944591 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmdhq" event={"ID":"aee16345-631c-48f8-a83b-bd30a9c62c60","Type":"ContainerDied","Data":"2d2e97283821c4be4ac239d7aceb1642d11c3ca2a6c916e5463fc4c8e30a89ab"} Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.944680 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmdhq" event={"ID":"aee16345-631c-48f8-a83b-bd30a9c62c60","Type":"ContainerStarted","Data":"8185f3bbd3cd54bcb64ad1907644ae49c0c8d3249cfd78824980d96055eb8f0f"} Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.948128 4757 generic.go:334] "Generic (PLEG): container finished" podID="055ac91e-78ce-47e5-91b2-cca82eb82bee" containerID="905378c8a93eb8a681994d6ce78c31850e4de7733b891a7ab7e93b2541f7b6f5" 
exitCode=0 Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.948494 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hsbt" event={"ID":"055ac91e-78ce-47e5-91b2-cca82eb82bee","Type":"ContainerDied","Data":"905378c8a93eb8a681994d6ce78c31850e4de7733b891a7ab7e93b2541f7b6f5"} Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.948555 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hsbt" event={"ID":"055ac91e-78ce-47e5-91b2-cca82eb82bee","Type":"ContainerStarted","Data":"e4ff590367434c2574eb8434a05b88edeb941ad374ff97d7b8c62932f196c7e2"} Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.952312 4757 generic.go:334] "Generic (PLEG): container finished" podID="4251acbc-50b9-4729-8fac-0186245abb51" containerID="e8dfeafe9b988f9b21d1653abd90e66c96c15bf4bd22a4dc496a787fb5d8977e" exitCode=0 Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.952372 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlx44" event={"ID":"4251acbc-50b9-4729-8fac-0186245abb51","Type":"ContainerDied","Data":"e8dfeafe9b988f9b21d1653abd90e66c96c15bf4bd22a4dc496a787fb5d8977e"} Oct 06 13:42:58 crc kubenswrapper[4757]: I1006 13:42:58.975462 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hfhqn" podStartSLOduration=2.147405633 podStartE2EDuration="4.975433563s" podCreationTimestamp="2025-10-06 13:42:54 +0000 UTC" firstStartedPulling="2025-10-06 13:42:55.919683192 +0000 UTC m=+264.417001729" lastFinishedPulling="2025-10-06 13:42:58.747711122 +0000 UTC m=+267.245029659" observedRunningTime="2025-10-06 13:42:58.961994529 +0000 UTC m=+267.459313096" watchObservedRunningTime="2025-10-06 13:42:58.975433563 +0000 UTC m=+267.472752130" Oct 06 13:42:59 crc kubenswrapper[4757]: I1006 13:42:59.964150 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmdhq" event={"ID":"aee16345-631c-48f8-a83b-bd30a9c62c60","Type":"ContainerStarted","Data":"7d553c669e0d9fbf220c8f8fe04a2c7d54b2be0c86205e9d1ac7d0d1c3a99602"} Oct 06 13:43:00 crc kubenswrapper[4757]: I1006 13:43:00.973218 4757 generic.go:334] "Generic (PLEG): container finished" podID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerID="7d553c669e0d9fbf220c8f8fe04a2c7d54b2be0c86205e9d1ac7d0d1c3a99602" exitCode=0 Oct 06 13:43:00 crc kubenswrapper[4757]: I1006 13:43:00.973287 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmdhq" event={"ID":"aee16345-631c-48f8-a83b-bd30a9c62c60","Type":"ContainerDied","Data":"7d553c669e0d9fbf220c8f8fe04a2c7d54b2be0c86205e9d1ac7d0d1c3a99602"} Oct 06 13:43:01 crc kubenswrapper[4757]: I1006 13:43:01.979320 4757 generic.go:334] "Generic (PLEG): container finished" podID="055ac91e-78ce-47e5-91b2-cca82eb82bee" containerID="2fb09a3ac106ba0a1f596a723abad8f9a12176a1f00536a03121cab865e7743e" exitCode=0 Oct 06 13:43:01 crc kubenswrapper[4757]: I1006 13:43:01.979372 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hsbt" event={"ID":"055ac91e-78ce-47e5-91b2-cca82eb82bee","Type":"ContainerDied","Data":"2fb09a3ac106ba0a1f596a723abad8f9a12176a1f00536a03121cab865e7743e"} Oct 06 13:43:01 crc kubenswrapper[4757]: I1006 13:43:01.981938 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlx44" 
event={"ID":"4251acbc-50b9-4729-8fac-0186245abb51","Type":"ContainerStarted","Data":"a9d0f3c746e4c8c9240dcc8f966415bbe73f11a705cfb36d92432fd68e42dc85"} Oct 06 13:43:01 crc kubenswrapper[4757]: I1006 13:43:01.985768 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmdhq" event={"ID":"aee16345-631c-48f8-a83b-bd30a9c62c60","Type":"ContainerStarted","Data":"395402b79fab0ba65d1706879e6453121eced8fc1d8d13f3f9f65f772582de0d"} Oct 06 13:43:02 crc kubenswrapper[4757]: I1006 13:43:02.011301 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lmdhq" podStartSLOduration=2.313257544 podStartE2EDuration="5.011277846s" podCreationTimestamp="2025-10-06 13:42:57 +0000 UTC" firstStartedPulling="2025-10-06 13:42:58.945965366 +0000 UTC m=+267.443283903" lastFinishedPulling="2025-10-06 13:43:01.643985668 +0000 UTC m=+270.141304205" observedRunningTime="2025-10-06 13:43:02.011002488 +0000 UTC m=+270.508321025" watchObservedRunningTime="2025-10-06 13:43:02.011277846 +0000 UTC m=+270.508596383" Oct 06 13:43:02 crc kubenswrapper[4757]: I1006 13:43:02.025420 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rlx44" podStartSLOduration=5.407557067 podStartE2EDuration="8.025399972s" podCreationTimestamp="2025-10-06 13:42:54 +0000 UTC" firstStartedPulling="2025-10-06 13:42:56.990357056 +0000 UTC m=+265.487675603" lastFinishedPulling="2025-10-06 13:42:59.608199951 +0000 UTC m=+268.105518508" observedRunningTime="2025-10-06 13:43:02.023976029 +0000 UTC m=+270.521294586" watchObservedRunningTime="2025-10-06 13:43:02.025399972 +0000 UTC m=+270.522718509" Oct 06 13:43:04 crc kubenswrapper[4757]: I1006 13:43:04.010000 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6hsbt" event={"ID":"055ac91e-78ce-47e5-91b2-cca82eb82bee","Type":"ContainerStarted","Data":"5182896121f09de0df6a5bf841c200c056c49802d0db7ec69e148105c5dea4a3"} Oct 06 13:43:04 crc kubenswrapper[4757]: I1006 13:43:04.033492 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6hsbt" podStartSLOduration=2.7312736109999998 podStartE2EDuration="7.033474754s" podCreationTimestamp="2025-10-06 13:42:57 +0000 UTC" firstStartedPulling="2025-10-06 13:42:58.949912995 +0000 UTC m=+267.447231552" lastFinishedPulling="2025-10-06 13:43:03.252114158 +0000 UTC m=+271.749432695" observedRunningTime="2025-10-06 13:43:04.031300028 +0000 UTC m=+272.528618585" watchObservedRunningTime="2025-10-06 13:43:04.033474754 +0000 UTC m=+272.530793291" Oct 06 13:43:05 crc kubenswrapper[4757]: I1006 13:43:05.121970 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:43:05 crc kubenswrapper[4757]: I1006 13:43:05.122021 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:43:05 crc kubenswrapper[4757]: I1006 13:43:05.174872 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:43:05 crc kubenswrapper[4757]: I1006 13:43:05.310055 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:43:05 crc kubenswrapper[4757]: I1006 13:43:05.310528 4757 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:43:06 crc kubenswrapper[4757]: I1006 13:43:06.071160 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hfhqn" Oct 06 13:43:06 crc kubenswrapper[4757]: I1006 13:43:06.347691 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rlx44" podUID="4251acbc-50b9-4729-8fac-0186245abb51" containerName="registry-server" probeResult="failure" output=< Oct 06 13:43:06 crc kubenswrapper[4757]: timeout: failed to connect service ":50051" within 1s Oct 06 13:43:06 crc kubenswrapper[4757]: > Oct 06 13:43:07 crc kubenswrapper[4757]: I1006 13:43:07.540734 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:43:07 crc kubenswrapper[4757]: I1006 13:43:07.540776 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:43:07 crc kubenswrapper[4757]: I1006 13:43:07.607788 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:43:07 crc kubenswrapper[4757]: I1006 13:43:07.718340 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:43:07 crc kubenswrapper[4757]: I1006 13:43:07.718509 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:43:07 crc kubenswrapper[4757]: I1006 13:43:07.773069 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:43:08 crc kubenswrapper[4757]: I1006 13:43:08.072511 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6hsbt" Oct 06 13:43:08 crc kubenswrapper[4757]: I1006 13:43:08.077428 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 13:43:15 crc kubenswrapper[4757]: I1006 13:43:15.354035 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:43:15 crc kubenswrapper[4757]: I1006 13:43:15.400045 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 13:44:34 crc kubenswrapper[4757]: I1006 13:44:34.360913 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:44:34 crc kubenswrapper[4757]: I1006 13:44:34.361688 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.144529 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj"] Oct 06 13:45:00 crc 
kubenswrapper[4757]: I1006 13:45:00.146260 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.149208 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.150485 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.155056 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj"] Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.237344 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e38f0f63-c4cf-464b-8daa-28560496b8e3-secret-volume\") pod \"collect-profiles-29329305-824dj\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.237828 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e38f0f63-c4cf-464b-8daa-28560496b8e3-config-volume\") pod \"collect-profiles-29329305-824dj\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.237961 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qq4f\" (UniqueName: \"kubernetes.io/projected/e38f0f63-c4cf-464b-8daa-28560496b8e3-kube-api-access-2qq4f\") pod \"collect-profiles-29329305-824dj\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.339479 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e38f0f63-c4cf-464b-8daa-28560496b8e3-secret-volume\") pod \"collect-profiles-29329305-824dj\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.339600 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e38f0f63-c4cf-464b-8daa-28560496b8e3-config-volume\") pod \"collect-profiles-29329305-824dj\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.339639 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qq4f\" (UniqueName: \"kubernetes.io/projected/e38f0f63-c4cf-464b-8daa-28560496b8e3-kube-api-access-2qq4f\") pod \"collect-profiles-29329305-824dj\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.341517 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/e38f0f63-c4cf-464b-8daa-28560496b8e3-config-volume\") pod \"collect-profiles-29329305-824dj\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.349235 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e38f0f63-c4cf-464b-8daa-28560496b8e3-secret-volume\") pod \"collect-profiles-29329305-824dj\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.357131 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qq4f\" (UniqueName: \"kubernetes.io/projected/e38f0f63-c4cf-464b-8daa-28560496b8e3-kube-api-access-2qq4f\") pod \"collect-profiles-29329305-824dj\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.473683 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.657261 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj"] Oct 06 13:45:00 crc kubenswrapper[4757]: I1006 13:45:00.675371 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" event={"ID":"e38f0f63-c4cf-464b-8daa-28560496b8e3","Type":"ContainerStarted","Data":"95199fa528f7a379ae51c60556f142cea53bb207a69d68cd421cf0ba513fa74f"} Oct 06 13:45:01 crc kubenswrapper[4757]: I1006 13:45:01.683799 4757 generic.go:334] "Generic (PLEG): container finished" podID="e38f0f63-c4cf-464b-8daa-28560496b8e3" containerID="7e8fd278f05575a50202632745aeea7ae16a3a20c85ec4e429878474f281cd15" exitCode=0 Oct 06 13:45:01 crc kubenswrapper[4757]: I1006 13:45:01.683856 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" event={"ID":"e38f0f63-c4cf-464b-8daa-28560496b8e3","Type":"ContainerDied","Data":"7e8fd278f05575a50202632745aeea7ae16a3a20c85ec4e429878474f281cd15"} Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.004273 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.180417 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e38f0f63-c4cf-464b-8daa-28560496b8e3-config-volume\") pod \"e38f0f63-c4cf-464b-8daa-28560496b8e3\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.180523 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qq4f\" (UniqueName: \"kubernetes.io/projected/e38f0f63-c4cf-464b-8daa-28560496b8e3-kube-api-access-2qq4f\") pod \"e38f0f63-c4cf-464b-8daa-28560496b8e3\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.180661 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e38f0f63-c4cf-464b-8daa-28560496b8e3-secret-volume\") pod \"e38f0f63-c4cf-464b-8daa-28560496b8e3\" (UID: \"e38f0f63-c4cf-464b-8daa-28560496b8e3\") " Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.182046 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e38f0f63-c4cf-464b-8daa-28560496b8e3-config-volume" (OuterVolumeSpecName: "config-volume") pod "e38f0f63-c4cf-464b-8daa-28560496b8e3" (UID: "e38f0f63-c4cf-464b-8daa-28560496b8e3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.189488 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e38f0f63-c4cf-464b-8daa-28560496b8e3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e38f0f63-c4cf-464b-8daa-28560496b8e3" (UID: "e38f0f63-c4cf-464b-8daa-28560496b8e3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.190431 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e38f0f63-c4cf-464b-8daa-28560496b8e3-kube-api-access-2qq4f" (OuterVolumeSpecName: "kube-api-access-2qq4f") pod "e38f0f63-c4cf-464b-8daa-28560496b8e3" (UID: "e38f0f63-c4cf-464b-8daa-28560496b8e3"). InnerVolumeSpecName "kube-api-access-2qq4f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.283342 4757 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e38f0f63-c4cf-464b-8daa-28560496b8e3-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.283924 4757 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e38f0f63-c4cf-464b-8daa-28560496b8e3-config-volume\") on node \"crc\" DevicePath \"\"" Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.283954 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qq4f\" (UniqueName: \"kubernetes.io/projected/e38f0f63-c4cf-464b-8daa-28560496b8e3-kube-api-access-2qq4f\") on node \"crc\" DevicePath \"\"" Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.695232 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" event={"ID":"e38f0f63-c4cf-464b-8daa-28560496b8e3","Type":"ContainerDied","Data":"95199fa528f7a379ae51c60556f142cea53bb207a69d68cd421cf0ba513fa74f"} Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.695283 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95199fa528f7a379ae51c60556f142cea53bb207a69d68cd421cf0ba513fa74f" Oct 06 13:45:03 crc kubenswrapper[4757]: I1006 13:45:03.695316 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj" Oct 06 13:45:04 crc kubenswrapper[4757]: I1006 13:45:04.362963 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:45:04 crc kubenswrapper[4757]: I1006 13:45:04.363085 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:45:34 crc kubenswrapper[4757]: I1006 13:45:34.361273 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:45:34 crc kubenswrapper[4757]: I1006 13:45:34.362079 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:45:34 crc kubenswrapper[4757]: I1006 13:45:34.362177 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:45:34 crc kubenswrapper[4757]: I1006 13:45:34.362911 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"cbd63c2fd83eb3b13b70ca6d17410007d8a08e6fecff91ef597ee01a17e3a5db"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 13:45:34 crc kubenswrapper[4757]: I1006 13:45:34.362994 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://cbd63c2fd83eb3b13b70ca6d17410007d8a08e6fecff91ef597ee01a17e3a5db" gracePeriod=600 Oct 06 13:45:34 crc kubenswrapper[4757]: I1006 13:45:34.904193 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="cbd63c2fd83eb3b13b70ca6d17410007d8a08e6fecff91ef597ee01a17e3a5db" exitCode=0 Oct 06 13:45:34 crc kubenswrapper[4757]: I1006 13:45:34.904274 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"cbd63c2fd83eb3b13b70ca6d17410007d8a08e6fecff91ef597ee01a17e3a5db"} Oct 06 13:45:34 crc kubenswrapper[4757]: I1006 13:45:34.905100 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"c1caaff02f46fc52972611163537c1807a4d146f35cf5ae6f7d131326516fd78"} Oct 06 13:45:34 crc kubenswrapper[4757]: I1006 13:45:34.905145 4757 scope.go:117] "RemoveContainer" containerID="151c97566cda63000d69b51293c84fad70c6e331d3a1b62bd3a03ee5732d5214" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.734144 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qcgqd"] Oct 06 13:45:52 crc kubenswrapper[4757]: E1006 13:45:52.735361 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e38f0f63-c4cf-464b-8daa-28560496b8e3" containerName="collect-profiles" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.735382 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e38f0f63-c4cf-464b-8daa-28560496b8e3" containerName="collect-profiles" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.735554 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e38f0f63-c4cf-464b-8daa-28560496b8e3" containerName="collect-profiles" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.736189 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.755243 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qcgqd"] Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.870042 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/02ebf06b-3a23-4030-ace5-52eac64985ef-registry-tls\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.870130 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.870176 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/02ebf06b-3a23-4030-ace5-52eac64985ef-trusted-ca\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.870206 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/02ebf06b-3a23-4030-ace5-52eac64985ef-registry-certificates\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.870225 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/02ebf06b-3a23-4030-ace5-52eac64985ef-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.870255 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/02ebf06b-3a23-4030-ace5-52eac64985ef-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.870280 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlfbn\" (UniqueName: \"kubernetes.io/projected/02ebf06b-3a23-4030-ace5-52eac64985ef-kube-api-access-jlfbn\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.870495 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/02ebf06b-3a23-4030-ace5-52eac64985ef-bound-sa-token\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.891579 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.972264 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/02ebf06b-3a23-4030-ace5-52eac64985ef-trusted-ca\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.972319 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/02ebf06b-3a23-4030-ace5-52eac64985ef-registry-certificates\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.972354 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/02ebf06b-3a23-4030-ace5-52eac64985ef-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.972403 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/02ebf06b-3a23-4030-ace5-52eac64985ef-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.972443 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlfbn\" (UniqueName: \"kubernetes.io/projected/02ebf06b-3a23-4030-ace5-52eac64985ef-kube-api-access-jlfbn\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.972481 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/02ebf06b-3a23-4030-ace5-52eac64985ef-bound-sa-token\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.972537 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/02ebf06b-3a23-4030-ace5-52eac64985ef-registry-tls\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.973123 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/02ebf06b-3a23-4030-ace5-52eac64985ef-ca-trust-extracted\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.973688 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/02ebf06b-3a23-4030-ace5-52eac64985ef-trusted-ca\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.974395 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/02ebf06b-3a23-4030-ace5-52eac64985ef-registry-certificates\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.979831 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/02ebf06b-3a23-4030-ace5-52eac64985ef-registry-tls\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.979921 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/02ebf06b-3a23-4030-ace5-52eac64985ef-installation-pull-secrets\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.991399 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlfbn\" (UniqueName: \"kubernetes.io/projected/02ebf06b-3a23-4030-ace5-52eac64985ef-kube-api-access-jlfbn\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:52 crc kubenswrapper[4757]: I1006 13:45:52.993441 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/02ebf06b-3a23-4030-ace5-52eac64985ef-bound-sa-token\") pod \"image-registry-66df7c8f76-qcgqd\" (UID: \"02ebf06b-3a23-4030-ace5-52eac64985ef\") " pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:53 crc kubenswrapper[4757]: I1006 13:45:53.059481 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:53 crc kubenswrapper[4757]: I1006 13:45:53.268757 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-qcgqd"] Oct 06 13:45:53 crc kubenswrapper[4757]: W1006 13:45:53.278471 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02ebf06b_3a23_4030_ace5_52eac64985ef.slice/crio-ce7bde1b24ad313de531a3c5fb36709f6e4c84ff4636d4399d35da0bd1287a61 WatchSource:0}: Error finding container ce7bde1b24ad313de531a3c5fb36709f6e4c84ff4636d4399d35da0bd1287a61: Status 404 returned error can't find the container with id ce7bde1b24ad313de531a3c5fb36709f6e4c84ff4636d4399d35da0bd1287a61 Oct 06 13:45:54 crc kubenswrapper[4757]: I1006 13:45:54.019319 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" event={"ID":"02ebf06b-3a23-4030-ace5-52eac64985ef","Type":"ContainerStarted","Data":"1b0b3fd630e5df82beba37f34b293059514da7d64ceff100ac9520b4fadb0e8c"} Oct 06 13:45:54 crc kubenswrapper[4757]: I1006 13:45:54.019728 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" event={"ID":"02ebf06b-3a23-4030-ace5-52eac64985ef","Type":"ContainerStarted","Data":"ce7bde1b24ad313de531a3c5fb36709f6e4c84ff4636d4399d35da0bd1287a61"} Oct 06 13:45:54 crc kubenswrapper[4757]: I1006 13:45:54.019769 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:45:54 crc kubenswrapper[4757]: I1006 13:45:54.045527 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" podStartSLOduration=2.045506916 podStartE2EDuration="2.045506916s" podCreationTimestamp="2025-10-06 13:45:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:45:54.043330148 +0000 UTC m=+442.540648705" watchObservedRunningTime="2025-10-06 13:45:54.045506916 +0000 UTC m=+442.542825453" Oct 06 13:46:13 crc kubenswrapper[4757]: I1006 13:46:13.068935 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-qcgqd" Oct 06 13:46:13 crc kubenswrapper[4757]: I1006 13:46:13.133288 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-99wzw"] Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.190785 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" podUID="a7126d08-f833-4acc-b4f4-e7d4d88b00ca" containerName="registry" containerID="cri-o://a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0" gracePeriod=30 Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.533558 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.646886 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcj5f\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-kube-api-access-gcj5f\") pod \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.646993 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-certificates\") pod \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.647035 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-bound-sa-token\") pod \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.647074 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-installation-pull-secrets\") pod \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.647429 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.647476 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-trusted-ca\") pod \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.647510 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-ca-trust-extracted\") pod \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.647619 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-tls\") pod \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\" (UID: \"a7126d08-f833-4acc-b4f4-e7d4d88b00ca\") " Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.648893 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "a7126d08-f833-4acc-b4f4-e7d4d88b00ca" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.648988 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a7126d08-f833-4acc-b4f4-e7d4d88b00ca" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.654016 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-kube-api-access-gcj5f" (OuterVolumeSpecName: "kube-api-access-gcj5f") pod "a7126d08-f833-4acc-b4f4-e7d4d88b00ca" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca"). InnerVolumeSpecName "kube-api-access-gcj5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.654783 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a7126d08-f833-4acc-b4f4-e7d4d88b00ca" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.654941 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "a7126d08-f833-4acc-b4f4-e7d4d88b00ca" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.655188 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "a7126d08-f833-4acc-b4f4-e7d4d88b00ca" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.658672 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "a7126d08-f833-4acc-b4f4-e7d4d88b00ca" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.675574 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "a7126d08-f833-4acc-b4f4-e7d4d88b00ca" (UID: "a7126d08-f833-4acc-b4f4-e7d4d88b00ca"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.748976 4757 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.749057 4757 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.749090 4757 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.749134 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.749152 4757 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.749171 4757 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 06 13:46:38 crc kubenswrapper[4757]: I1006 13:46:38.749188 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcj5f\" (UniqueName: \"kubernetes.io/projected/a7126d08-f833-4acc-b4f4-e7d4d88b00ca-kube-api-access-gcj5f\") on node \"crc\" DevicePath \"\"" Oct 06 13:46:39 crc kubenswrapper[4757]: I1006 13:46:39.307899 4757 generic.go:334] "Generic (PLEG): container finished" podID="a7126d08-f833-4acc-b4f4-e7d4d88b00ca" containerID="a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0" exitCode=0 Oct 06 13:46:39 crc kubenswrapper[4757]: I1006 13:46:39.308007 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw"
Oct 06 13:46:39 crc kubenswrapper[4757]: I1006 13:46:39.308177 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" event={"ID":"a7126d08-f833-4acc-b4f4-e7d4d88b00ca","Type":"ContainerDied","Data":"a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0"}
Oct 06 13:46:39 crc kubenswrapper[4757]: I1006 13:46:39.310609 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-99wzw" event={"ID":"a7126d08-f833-4acc-b4f4-e7d4d88b00ca","Type":"ContainerDied","Data":"74523d52d1ec252089eb5972f8460dbc649b6154651d7db727eebf7fc18fea87"}
Oct 06 13:46:39 crc kubenswrapper[4757]: I1006 13:46:39.310674 4757 scope.go:117] "RemoveContainer" containerID="a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0"
Oct 06 13:46:39 crc kubenswrapper[4757]: I1006 13:46:39.337173 4757 scope.go:117] "RemoveContainer" containerID="a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0"
Oct 06 13:46:39 crc kubenswrapper[4757]: E1006 13:46:39.337791 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0\": container with ID starting with a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0 not found: ID does not exist" containerID="a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0"
Oct 06 13:46:39 crc kubenswrapper[4757]: I1006 13:46:39.337837 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0"} err="failed to get container status \"a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0\": rpc error: code = NotFound desc = could not find container \"a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0\": container with ID starting with a075f29fd5812a9e54f3e8d835a3f5ca47ec5ea7b4c76337ad1df03cdd2681f0 not found: ID does not exist"
Oct 06 13:46:39 crc kubenswrapper[4757]: I1006 13:46:39.375384 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-99wzw"]
Oct 06 13:46:39 crc kubenswrapper[4757]: I1006 13:46:39.383616 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-99wzw"]
Oct 06 13:46:40 crc kubenswrapper[4757]: I1006 13:46:40.191655 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7126d08-f833-4acc-b4f4-e7d4d88b00ca" path="/var/lib/kubelet/pods/a7126d08-f833-4acc-b4f4-e7d4d88b00ca/volumes"
Oct 06 13:47:34 crc kubenswrapper[4757]: I1006 13:47:34.363071 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 06 13:47:34 crc kubenswrapper[4757]: I1006 13:47:34.363802 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 06 13:48:04 crc kubenswrapper[4757]: I1006 13:48:04.361602 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 06 13:48:04 crc kubenswrapper[4757]: I1006 13:48:04.362502 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 06 13:48:34 crc kubenswrapper[4757]: I1006 13:48:34.362059 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 06 13:48:34 crc kubenswrapper[4757]: I1006 13:48:34.363016 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 06 13:48:34 crc kubenswrapper[4757]: I1006 13:48:34.363139 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h"
Oct 06 13:48:34 crc kubenswrapper[4757]: I1006 13:48:34.364302 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c1caaff02f46fc52972611163537c1807a4d146f35cf5ae6f7d131326516fd78"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 06 13:48:34 crc kubenswrapper[4757]: I1006 13:48:34.364407 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://c1caaff02f46fc52972611163537c1807a4d146f35cf5ae6f7d131326516fd78" gracePeriod=600
Oct 06 13:48:35 crc kubenswrapper[4757]: I1006 13:48:35.035663 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="c1caaff02f46fc52972611163537c1807a4d146f35cf5ae6f7d131326516fd78" exitCode=0
Oct 06 13:48:35 crc kubenswrapper[4757]: I1006 13:48:35.035783 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"c1caaff02f46fc52972611163537c1807a4d146f35cf5ae6f7d131326516fd78"}
Oct 06 13:48:35 crc kubenswrapper[4757]: I1006 13:48:35.036220 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"6542a2ffbfd366a895d5ce83b4ab3a725c3018b8a608b5d8f6bff2292762cd35"}
Oct 06 13:48:35 crc kubenswrapper[4757]: I1006 13:48:35.036247 4757 scope.go:117] "RemoveContainer" containerID="cbd63c2fd83eb3b13b70ca6d17410007d8a08e6fecff91ef597ee01a17e3a5db"
Oct 06 13:49:23 crc kubenswrapper[4757]: I1006 13:49:23.811157 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-58bhb"]
Oct 06 13:49:23 crc kubenswrapper[4757]: I1006 13:49:23.812602 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d" gracePeriod=30
Oct 06 13:49:23 crc kubenswrapper[4757]: I1006 13:49:23.812685 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kube-rbac-proxy-node" containerID="cri-o://cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98" gracePeriod=30
Oct 06 13:49:23 crc kubenswrapper[4757]: I1006 13:49:23.812788 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="northd" containerID="cri-o://4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561" gracePeriod=30
Oct 06 13:49:23 crc kubenswrapper[4757]: I1006 13:49:23.812735 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovn-acl-logging" containerID="cri-o://1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d" gracePeriod=30
Oct 06 13:49:23 crc kubenswrapper[4757]: I1006 13:49:23.812920 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="sbdb" containerID="cri-o://5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf" gracePeriod=30
Oct 06 13:49:23 crc kubenswrapper[4757]: I1006 13:49:23.812631 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="nbdb" containerID="cri-o://df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8" gracePeriod=30
Oct 06 13:49:23 crc kubenswrapper[4757]: I1006 13:49:23.821174 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovn-controller" containerID="cri-o://38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed" gracePeriod=30
Oct 06 13:49:23 crc kubenswrapper[4757]: I1006 13:49:23.876458 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller" containerID="cri-o://3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab" gracePeriod=30
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.164316 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/3.log"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.166833 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovn-acl-logging/0.log"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.167430 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovn-controller/0.log"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.167835 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228051 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-l46sb"]
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228340 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="nbdb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228358 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="nbdb"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228371 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228379 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228387 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228392 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228399 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7126d08-f833-4acc-b4f4-e7d4d88b00ca" containerName="registry"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228405 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7126d08-f833-4acc-b4f4-e7d4d88b00ca" containerName="registry"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228411 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kubecfg-setup"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228417 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kubecfg-setup"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228425 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovn-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228431 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovn-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228440 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovn-acl-logging"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228446 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovn-acl-logging"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228455 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228461 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228468 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228474 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228481 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="sbdb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228486 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="sbdb"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228496 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="northd"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228501 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="northd"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228510 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kube-rbac-proxy-ovn-metrics"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228517 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kube-rbac-proxy-ovn-metrics"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228523 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kube-rbac-proxy-node"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228529 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kube-rbac-proxy-node"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228613 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kube-rbac-proxy-ovn-metrics"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228625 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7126d08-f833-4acc-b4f4-e7d4d88b00ca" containerName="registry"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228632 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="kube-rbac-proxy-node"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228640 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228647 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228653 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228661 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovn-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228667 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228675 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovn-acl-logging"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228681 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="sbdb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228688 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="nbdb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228695 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="northd"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.228773 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228782 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.228861 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerName="ovnkube-controller"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.230293 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.272018 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovn-node-metrics-cert\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.272491 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-openvswitch\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.272740 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-node-log\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.272883 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-node-log" (OuterVolumeSpecName: "node-log") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.272992 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273039 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.272931 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-netd\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273224 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273262 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-ovn-kubernetes\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273281 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-slash\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273326 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-script-lib\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273377 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-var-lib-openvswitch\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273392 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-netns\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273418 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-kubelet\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273445 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdf96\" (UniqueName: \"kubernetes.io/projected/a6624d05-e024-49f2-bf87-33e7ea4fccbb-kube-api-access-cdf96\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273469 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-etc-openvswitch\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273502 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-config\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273527 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-ovn\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273549 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-env-overrides\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273602 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-systemd-units\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273639 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-log-socket\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273653 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-systemd\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.273677 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-bin\") pod \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\" (UID: \"a6624d05-e024-49f2-bf87-33e7ea4fccbb\") "
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274292 4757 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274312 4757 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-node-log\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274321 4757 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-netd\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274358 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274389 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274415 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274442 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-slash" (OuterVolumeSpecName: "host-slash") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274802 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274865 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274869 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-log-socket" (OuterVolumeSpecName: "log-socket") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274897 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274903 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274929 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.274955 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.275244 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.275402 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.276308 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.282878 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6624d05-e024-49f2-bf87-33e7ea4fccbb-kube-api-access-cdf96" (OuterVolumeSpecName: "kube-api-access-cdf96") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "kube-api-access-cdf96". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.283417 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.293313 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "a6624d05-e024-49f2-bf87-33e7ea4fccbb" (UID: "a6624d05-e024-49f2-bf87-33e7ea4fccbb"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.356656 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/2.log"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.357254 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/1.log"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.357305 4757 generic.go:334] "Generic (PLEG): container finished" podID="9144d9fd-70d7-4a29-8e6b-c020c611980a" containerID="5fee59d9cfb29bcaa00f0f7e454083411d714d0dcdc7f6fb55333dd7c18d4f4b" exitCode=2
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.357463 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9qf7z" event={"ID":"9144d9fd-70d7-4a29-8e6b-c020c611980a","Type":"ContainerDied","Data":"5fee59d9cfb29bcaa00f0f7e454083411d714d0dcdc7f6fb55333dd7c18d4f4b"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.357535 4757 scope.go:117] "RemoveContainer" containerID="ec9c673c59529f08cb919af940f8384584309e4e113c65431a529b935d1df3f9"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.358390 4757 scope.go:117] "RemoveContainer" containerID="5fee59d9cfb29bcaa00f0f7e454083411d714d0dcdc7f6fb55333dd7c18d4f4b"
Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.358671 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-9qf7z_openshift-multus(9144d9fd-70d7-4a29-8e6b-c020c611980a)\"" pod="openshift-multus/multus-9qf7z" podUID="9144d9fd-70d7-4a29-8e6b-c020c611980a"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.361875 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovnkube-controller/3.log"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.375604 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovn-acl-logging/0.log"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.375259 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttpfr\" (UniqueName: \"kubernetes.io/projected/15b33d35-4328-4e9c-bc6e-43d0c847f253-kube-api-access-ttpfr\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.375789 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-run-openvswitch\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376018 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/15b33d35-4328-4e9c-bc6e-43d0c847f253-ovnkube-config\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376128 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-cni-bin\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376232 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-kubelet\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376300 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/15b33d35-4328-4e9c-bc6e-43d0c847f253-env-overrides\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376437 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-node-log\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376490 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-systemd-units\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376515 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-run-systemd\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376599 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-var-lib-openvswitch\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376696 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-etc-openvswitch\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376728 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-cni-netd\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376740 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-58bhb_a6624d05-e024-49f2-bf87-33e7ea4fccbb/ovn-controller/0.log"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376750 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-run-ovn-kubernetes\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376832 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-log-socket\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376867 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-run-netns\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376898 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-slash\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.376971 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/15b33d35-4328-4e9c-bc6e-43d0c847f253-ovn-node-metrics-cert\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377082 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-run-ovn\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377166 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377272 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/15b33d35-4328-4e9c-bc6e-43d0c847f253-ovnkube-script-lib\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377443 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdf96\" (UniqueName: \"kubernetes.io/projected/a6624d05-e024-49f2-bf87-33e7ea4fccbb-kube-api-access-cdf96\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377465 4757 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-etc-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377487 4757 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-config\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377520 4757 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377536 4757 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-env-overrides\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377558 4757 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-systemd-units\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377577 4757 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-log-socket\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377594 4757 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-run-systemd\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377612 4757 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-cni-bin\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377643 4757 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377662 4757 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377683 4757 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377702 4757 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-slash\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377719 4757 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a6624d05-e024-49f2-bf87-33e7ea4fccbb-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377738 4757 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-var-lib-openvswitch\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377755 4757 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-run-netns\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377771 4757 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a6624d05-e024-49f2-bf87-33e7ea4fccbb-host-kubelet\") on node \"crc\" DevicePath \"\""
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377795 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab" exitCode=0
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377826 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf" exitCode=0
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377865 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8" exitCode=0
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377880 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561" exitCode=0
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377894 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d" exitCode=0
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377905 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98" exitCode=0
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377915 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d" exitCode=143
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377948 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377968 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378030 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378049 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378069 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378125 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378147 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378163 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378177 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378211 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378221 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378230 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378238 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378246 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378254 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378262 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378297 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378308 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378321 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378332 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378339 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378371 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378381 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378388 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378395 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378402 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378409 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378417 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378454 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378469 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378478 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378485 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378492 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378499 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378507 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378540 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378547 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378554 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378562 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.377952 4757 generic.go:334] "Generic (PLEG): container finished" podID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" containerID="38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed" exitCode=143
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378636 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-58bhb" event={"ID":"a6624d05-e024-49f2-bf87-33e7ea4fccbb","Type":"ContainerDied","Data":"59e148336d62b9718667dd01888afdba55a69d24b59073039269c12f6f92ce5f"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378650 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378658 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378666 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378673 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378704 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378713 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378720 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378729 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378736 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.378744 4757 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"}
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.406238 4757 scope.go:117] "RemoveContainer" containerID="3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.441763 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.453713 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-58bhb"]
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.457323 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-58bhb"]
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.467067 4757 scope.go:117] "RemoveContainer" containerID="5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479239 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-run-ovn-kubernetes\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479323 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-log-socket\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479354 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-run-netns\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479385 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-slash\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479410 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/15b33d35-4328-4e9c-bc6e-43d0c847f253-ovn-node-metrics-cert\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479379 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-run-ovn-kubernetes\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479445 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-run-ovn\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479484 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-log-socket\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479500 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-run-ovn\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479736 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-run-netns\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479752 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-slash\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479825 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479870 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/15b33d35-4328-4e9c-bc6e-43d0c847f253-ovnkube-script-lib\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479919 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttpfr\" (UniqueName: \"kubernetes.io/projected/15b33d35-4328-4e9c-bc6e-43d0c847f253-kube-api-access-ttpfr\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479966 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-run-openvswitch\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.479994 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/15b33d35-4328-4e9c-bc6e-43d0c847f253-ovnkube-config\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480026 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName:
\"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-cni-bin\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480054 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-kubelet\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480068 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/15b33d35-4328-4e9c-bc6e-43d0c847f253-env-overrides\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480177 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-node-log\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480196 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-systemd-units\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480211 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-run-systemd\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480248 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-var-lib-openvswitch\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480275 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-etc-openvswitch\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480299 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-cni-netd\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480290 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480569 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-run-systemd\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480684 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-node-log\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480728 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-cni-netd\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480711 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-etc-openvswitch\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480671 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-var-lib-openvswitch\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480758 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-systemd-units\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480776 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-cni-bin\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480813 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-run-openvswitch\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480828 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/15b33d35-4328-4e9c-bc6e-43d0c847f253-host-kubelet\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.480916 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/15b33d35-4328-4e9c-bc6e-43d0c847f253-ovnkube-script-lib\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.481224 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/15b33d35-4328-4e9c-bc6e-43d0c847f253-env-overrides\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.481326 4757 scope.go:117] "RemoveContainer" containerID="df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.482115 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/15b33d35-4328-4e9c-bc6e-43d0c847f253-ovnkube-config\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.484587 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/15b33d35-4328-4e9c-bc6e-43d0c847f253-ovn-node-metrics-cert\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.503302 4757 scope.go:117] "RemoveContainer" containerID="4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.503695 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttpfr\" (UniqueName: \"kubernetes.io/projected/15b33d35-4328-4e9c-bc6e-43d0c847f253-kube-api-access-ttpfr\") pod \"ovnkube-node-l46sb\" (UID: \"15b33d35-4328-4e9c-bc6e-43d0c847f253\") " pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.518391 4757 scope.go:117] "RemoveContainer" containerID="75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.534217 4757 scope.go:117] "RemoveContainer" containerID="cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.545054 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.548432 4757 scope.go:117] "RemoveContainer" containerID="1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.568310 4757 scope.go:117] "RemoveContainer" containerID="38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.603345 4757 scope.go:117] "RemoveContainer" containerID="3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.621190 4757 scope.go:117] "RemoveContainer" containerID="3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.621662 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab\": container with ID starting with 3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab not found: ID does not exist" containerID="3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.621714 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"} err="failed to get container status \"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab\": rpc error: code = NotFound desc = could not find container \"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab\": container with ID starting with 3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.621744 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.622160 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\": container with ID starting with c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0 not found: ID does not exist" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.622190 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"} err="failed to get container status \"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\": rpc error: code = NotFound desc = could not find container \"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\": container with ID starting with c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.622212 4757 scope.go:117] "RemoveContainer" containerID="5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.622521 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\": container with ID starting 
with 5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf not found: ID does not exist" containerID="5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.622564 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"} err="failed to get container status \"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\": rpc error: code = NotFound desc = could not find container \"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\": container with ID starting with 5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.622589 4757 scope.go:117] "RemoveContainer" containerID="df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.622991 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\": container with ID starting with df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8 not found: ID does not exist" containerID="df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.623013 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"} err="failed to get container status \"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\": rpc error: code = NotFound desc = could not find container \"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\": container with ID starting with df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.623026 4757 scope.go:117] "RemoveContainer" containerID="4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.623302 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\": container with ID starting with 4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561 not found: ID does not exist" containerID="4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.623334 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"} err="failed to get container status \"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\": rpc error: code = NotFound desc = could not find container \"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\": container with ID starting with 4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.623349 4757 scope.go:117] "RemoveContainer" containerID="75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.623584 4757 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\": container with ID starting with 75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d not found: ID does not exist" containerID="75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.623613 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"} err="failed to get container status \"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\": rpc error: code = NotFound desc = could not find container \"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\": container with ID starting with 75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.623633 4757 scope.go:117] "RemoveContainer" containerID="cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.623906 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\": container with ID starting with cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98 not found: ID does not exist" containerID="cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.623932 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"} err="failed to get container status \"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\": rpc error: code = NotFound desc = could not find container \"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\": container with ID starting with cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.623947 4757 scope.go:117] "RemoveContainer" containerID="1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.624183 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\": container with ID starting with 1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d not found: ID does not exist" containerID="1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.624205 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"} err="failed to get container status \"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\": rpc error: code = NotFound desc = could not find container \"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\": container with ID starting with 1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.624220 4757 scope.go:117] "RemoveContainer" 
containerID="38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.624575 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\": container with ID starting with 38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed not found: ID does not exist" containerID="38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.624600 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"} err="failed to get container status \"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\": rpc error: code = NotFound desc = could not find container \"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\": container with ID starting with 38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.624615 4757 scope.go:117] "RemoveContainer" containerID="3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936" Oct 06 13:49:24 crc kubenswrapper[4757]: E1006 13:49:24.624845 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\": container with ID starting with 3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936 not found: ID does not exist" containerID="3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.624869 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"} err="failed to get container status \"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\": rpc error: code = NotFound desc = could not find container \"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\": container with ID starting with 3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.624886 4757 scope.go:117] "RemoveContainer" containerID="3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.625086 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"} err="failed to get container status \"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab\": rpc error: code = NotFound desc = could not find container \"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab\": container with ID starting with 3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.625123 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.625393 4757 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"} err="failed to get container status \"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\": rpc error: code = NotFound desc = could not find container \"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\": container with ID starting with c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.625415 4757 scope.go:117] "RemoveContainer" containerID="5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.625697 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"} err="failed to get container status \"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\": rpc error: code = NotFound desc = could not find container \"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\": container with ID starting with 5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.625719 4757 scope.go:117] "RemoveContainer" containerID="df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.625973 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"} err="failed to get container status \"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\": rpc error: code = NotFound desc = could not find container \"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\": container with ID starting with df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.626034 4757 scope.go:117] "RemoveContainer" containerID="4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.626374 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"} err="failed to get container status \"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\": rpc error: code = NotFound desc = could not find container \"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\": container with ID starting with 4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.626417 4757 scope.go:117] "RemoveContainer" containerID="75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.626709 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"} err="failed to get container status \"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\": rpc error: code = NotFound desc = could not find container \"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\": container with ID starting with 75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d not found: ID does not exist" Oct 
06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.626730 4757 scope.go:117] "RemoveContainer" containerID="cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.626967 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"} err="failed to get container status \"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\": rpc error: code = NotFound desc = could not find container \"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\": container with ID starting with cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.626994 4757 scope.go:117] "RemoveContainer" containerID="1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.627230 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"} err="failed to get container status \"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\": rpc error: code = NotFound desc = could not find container \"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\": container with ID starting with 1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.627248 4757 scope.go:117] "RemoveContainer" containerID="38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.627487 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"} err="failed to get container status \"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\": rpc error: code = NotFound desc = could not find container \"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\": container with ID starting with 38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.627504 4757 scope.go:117] "RemoveContainer" containerID="3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.627797 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"} err="failed to get container status \"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\": rpc error: code = NotFound desc = could not find container \"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\": container with ID starting with 3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.627822 4757 scope.go:117] "RemoveContainer" containerID="3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.628051 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"} err="failed to get container status 
\"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab\": rpc error: code = NotFound desc = could not find container \"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab\": container with ID starting with 3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.628070 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.628338 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"} err="failed to get container status \"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\": rpc error: code = NotFound desc = could not find container \"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\": container with ID starting with c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.628364 4757 scope.go:117] "RemoveContainer" containerID="5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.628877 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"} err="failed to get container status \"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\": rpc error: code = NotFound desc = could not find container \"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\": container with ID starting with 5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.628908 4757 scope.go:117] "RemoveContainer" containerID="df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.629212 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"} err="failed to get container status \"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\": rpc error: code = NotFound desc = could not find container \"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\": container with ID starting with df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.629251 4757 scope.go:117] "RemoveContainer" containerID="4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.629607 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"} err="failed to get container status \"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\": rpc error: code = NotFound desc = could not find container \"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\": container with ID starting with 4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.629630 4757 scope.go:117] "RemoveContainer" 
containerID="75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.630050 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"} err="failed to get container status \"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\": rpc error: code = NotFound desc = could not find container \"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\": container with ID starting with 75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.630071 4757 scope.go:117] "RemoveContainer" containerID="cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.630327 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"} err="failed to get container status \"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\": rpc error: code = NotFound desc = could not find container \"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\": container with ID starting with cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.630347 4757 scope.go:117] "RemoveContainer" containerID="1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.630559 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"} err="failed to get container status \"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\": rpc error: code = NotFound desc = could not find container \"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\": container with ID starting with 1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.630582 4757 scope.go:117] "RemoveContainer" containerID="38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.630797 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"} err="failed to get container status \"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\": rpc error: code = NotFound desc = could not find container \"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\": container with ID starting with 38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.630819 4757 scope.go:117] "RemoveContainer" containerID="3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.631385 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"} err="failed to get container status \"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\": rpc error: code = NotFound desc = could not find 
container \"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\": container with ID starting with 3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.631428 4757 scope.go:117] "RemoveContainer" containerID="3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.631723 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab"} err="failed to get container status \"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab\": rpc error: code = NotFound desc = could not find container \"3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab\": container with ID starting with 3d34b3f2b73e53437a757437f264e99884eeedb564ce4e2a723a0345f0b383ab not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.631757 4757 scope.go:117] "RemoveContainer" containerID="c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.632224 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0"} err="failed to get container status \"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\": rpc error: code = NotFound desc = could not find container \"c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0\": container with ID starting with c59b4f6d5b02fd4640ea7a51803ca4d31b0440d8779c46f342b1e5af2e8a2bf0 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.632263 4757 scope.go:117] "RemoveContainer" containerID="5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.632549 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf"} err="failed to get container status \"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\": rpc error: code = NotFound desc = could not find container \"5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf\": container with ID starting with 5852bc46ae07b295a97cca9f5ef7d06484c12744c0f6ee57b2d14acee319fcaf not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.632575 4757 scope.go:117] "RemoveContainer" containerID="df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.632819 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8"} err="failed to get container status \"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\": rpc error: code = NotFound desc = could not find container \"df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8\": container with ID starting with df2b012b79afa6379b4a7add72a9093bbdbfcc8d618d6eae350a773fdddacfa8 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.632849 4757 scope.go:117] "RemoveContainer" containerID="4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.633165 4757 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561"} err="failed to get container status \"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\": rpc error: code = NotFound desc = could not find container \"4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561\": container with ID starting with 4093bde478f4910c2b1e2341bb0b3f640454b131e31b9ab7ad5900b8260b7561 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.633200 4757 scope.go:117] "RemoveContainer" containerID="75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.633464 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d"} err="failed to get container status \"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\": rpc error: code = NotFound desc = could not find container \"75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d\": container with ID starting with 75c8038f49690378c7322e4d2e50f7d5073bb744c3f64317b7c3f4acffaa338d not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.633505 4757 scope.go:117] "RemoveContainer" containerID="cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.633780 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98"} err="failed to get container status \"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\": rpc error: code = NotFound desc = could not find container \"cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98\": container with ID starting with cbe06e8892901366539846fa68ea4e379c32ca7b21843383dc4a72ac70f92b98 not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.633815 4757 scope.go:117] "RemoveContainer" containerID="1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.634121 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d"} err="failed to get container status \"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\": rpc error: code = NotFound desc = could not find container \"1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d\": container with ID starting with 1580d51cb7ec22f70787157dc80236a312eca77785329c663d78c02ee9a0685d not found: ID does not exist" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.634149 4757 scope.go:117] "RemoveContainer" containerID="38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed" Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.634513 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed"} err="failed to get container status \"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\": rpc error: code = NotFound desc = could not find container \"38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed\": container with ID starting with 
38df7deb98a16e465d5fdbad8e04c2e62c5cfb418a48fe9f01da335dba2907ed not found: ID does not exist"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.634574 4757 scope.go:117] "RemoveContainer" containerID="3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"
Oct 06 13:49:24 crc kubenswrapper[4757]: I1006 13:49:24.634894 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936"} err="failed to get container status \"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\": rpc error: code = NotFound desc = could not find container \"3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936\": container with ID starting with 3fe5f1f425ddd770c518939dee061dc2641c1ca6c3538d8d30ac0724a6353936 not found: ID does not exist"
Oct 06 13:49:25 crc kubenswrapper[4757]: I1006 13:49:25.390798 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/2.log"
Oct 06 13:49:25 crc kubenswrapper[4757]: I1006 13:49:25.394670 4757 generic.go:334] "Generic (PLEG): container finished" podID="15b33d35-4328-4e9c-bc6e-43d0c847f253" containerID="6882263d86e6b9706b84c01795a204889dc0c935fdc16ba72177ca34404305e8" exitCode=0
Oct 06 13:49:25 crc kubenswrapper[4757]: I1006 13:49:25.394875 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerDied","Data":"6882263d86e6b9706b84c01795a204889dc0c935fdc16ba72177ca34404305e8"}
Oct 06 13:49:25 crc kubenswrapper[4757]: I1006 13:49:25.394987 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerStarted","Data":"8f5d432f085fe935f11d0025df492e0ec66680b15feded73503111030d3e6e14"}
Oct 06 13:49:26 crc kubenswrapper[4757]: I1006 13:49:26.191696 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6624d05-e024-49f2-bf87-33e7ea4fccbb" path="/var/lib/kubelet/pods/a6624d05-e024-49f2-bf87-33e7ea4fccbb/volumes"
Oct 06 13:49:26 crc kubenswrapper[4757]: I1006 13:49:26.407781 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerStarted","Data":"7954464ee9a3129ffbddec7245502818b53146da261a8c64f0870e6f5008167b"}
Oct 06 13:49:26 crc kubenswrapper[4757]: I1006 13:49:26.407829 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerStarted","Data":"6d75c9b0c2f496ad6793aa9065029a0261b69beaad7ab941e9443f95ba0edf36"}
Oct 06 13:49:26 crc kubenswrapper[4757]: I1006 13:49:26.407844 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerStarted","Data":"ef89e5ade41f928b21b4df1107dec7402ea0666a90985cf4274221109855e1f9"}
Oct 06 13:49:26 crc kubenswrapper[4757]: I1006 13:49:26.407859 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerStarted","Data":"2a0bc916a5b02647fcb7334ec76ee9c5f996caa19b364de4f54e361b69e3c46f"}
Oct 06 13:49:26 crc kubenswrapper[4757]: I1006 13:49:26.407872 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerStarted","Data":"3dba66c48b9866c42178bc1ff52521000c97190a3882ef463f7cdc8a8c18a247"}
Oct 06 13:49:26 crc kubenswrapper[4757]: I1006 13:49:26.407883 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerStarted","Data":"01703d2aed46ea45f20b656875bbee3f64bc85a2c64cd7584252641f93d35981"}
Oct 06 13:49:29 crc kubenswrapper[4757]: I1006 13:49:29.435671 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerStarted","Data":"7f69855db172a4c7675d107febea98be4b6ef5d1057316f092c4956cf3db0553"}
Oct 06 13:49:31 crc kubenswrapper[4757]: I1006 13:49:31.459779 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" event={"ID":"15b33d35-4328-4e9c-bc6e-43d0c847f253","Type":"ContainerStarted","Data":"8c88cb8f9efa884f2334477da993f4400754d2b8b9180ce019ecd4c6bbf50aa8"}
Oct 06 13:49:31 crc kubenswrapper[4757]: I1006 13:49:31.460618 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:31 crc kubenswrapper[4757]: I1006 13:49:31.460642 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:31 crc kubenswrapper[4757]: I1006 13:49:31.498020 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:31 crc kubenswrapper[4757]: I1006 13:49:31.500677 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb" podStartSLOduration=7.500655585 podStartE2EDuration="7.500655585s" podCreationTimestamp="2025-10-06 13:49:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:49:31.496864442 +0000 UTC m=+659.994183009" watchObservedRunningTime="2025-10-06 13:49:31.500655585 +0000 UTC m=+659.997974122"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.099712 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-kwk85"]
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.100655 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.102563 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.102685 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.104278 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.106015 4757 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-8h4vq"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.108971 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kwk85"]
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.282859 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-crc-storage\") pod \"crc-storage-crc-kwk85\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") " pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.283447 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbmh8\" (UniqueName: \"kubernetes.io/projected/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-kube-api-access-qbmh8\") pod \"crc-storage-crc-kwk85\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") " pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.283521 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-node-mnt\") pod \"crc-storage-crc-kwk85\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") " pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.385278 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbmh8\" (UniqueName: \"kubernetes.io/projected/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-kube-api-access-qbmh8\") pod \"crc-storage-crc-kwk85\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") " pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.385349 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-node-mnt\") pod \"crc-storage-crc-kwk85\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") " pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.385405 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-crc-storage\") pod \"crc-storage-crc-kwk85\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") " pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.385974 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-node-mnt\") pod \"crc-storage-crc-kwk85\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") " pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.386310 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-crc-storage\") pod \"crc-storage-crc-kwk85\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") " pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.419582 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbmh8\" (UniqueName: \"kubernetes.io/projected/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-kube-api-access-qbmh8\") pod \"crc-storage-crc-kwk85\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") " pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.465768 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.506233 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:49:32 crc kubenswrapper[4757]: I1006 13:49:32.717362 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: E1006 13:49:32.763021 4757 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(c03979386658baf2314316f41d2b9b01a9c9bff302d2842cadaa44c9e8292aad): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Oct 06 13:49:32 crc kubenswrapper[4757]: E1006 13:49:32.763268 4757 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(c03979386658baf2314316f41d2b9b01a9c9bff302d2842cadaa44c9e8292aad): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: E1006 13:49:32.763321 4757 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(c03979386658baf2314316f41d2b9b01a9c9bff302d2842cadaa44c9e8292aad): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:32 crc kubenswrapper[4757]: E1006 13:49:32.763436 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-kwk85_crc-storage(d9d9e156-e9b3-454d-88d8-d5b2c9c54512)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-kwk85_crc-storage(d9d9e156-e9b3-454d-88d8-d5b2c9c54512)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(c03979386658baf2314316f41d2b9b01a9c9bff302d2842cadaa44c9e8292aad): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-kwk85" podUID="d9d9e156-e9b3-454d-88d8-d5b2c9c54512"
Oct 06 13:49:33 crc kubenswrapper[4757]: I1006 13:49:33.472256 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:33 crc kubenswrapper[4757]: I1006 13:49:33.474173 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:33 crc kubenswrapper[4757]: E1006 13:49:33.510501 4757 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(1fda9ec583eed87104f4fd9a43c0a5b2423e8a318e6cf990b91638e3dcf4d674): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Oct 06 13:49:33 crc kubenswrapper[4757]: E1006 13:49:33.510592 4757 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(1fda9ec583eed87104f4fd9a43c0a5b2423e8a318e6cf990b91638e3dcf4d674): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:33 crc kubenswrapper[4757]: E1006 13:49:33.510626 4757 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(1fda9ec583eed87104f4fd9a43c0a5b2423e8a318e6cf990b91638e3dcf4d674): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:33 crc kubenswrapper[4757]: E1006 13:49:33.510687 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-kwk85_crc-storage(d9d9e156-e9b3-454d-88d8-d5b2c9c54512)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-kwk85_crc-storage(d9d9e156-e9b3-454d-88d8-d5b2c9c54512)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(1fda9ec583eed87104f4fd9a43c0a5b2423e8a318e6cf990b91638e3dcf4d674): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-kwk85" podUID="d9d9e156-e9b3-454d-88d8-d5b2c9c54512"
Oct 06 13:49:35 crc kubenswrapper[4757]: I1006 13:49:35.180746 4757 scope.go:117] "RemoveContainer" containerID="5fee59d9cfb29bcaa00f0f7e454083411d714d0dcdc7f6fb55333dd7c18d4f4b"
Oct 06 13:49:35 crc kubenswrapper[4757]: E1006 13:49:35.181550 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-9qf7z_openshift-multus(9144d9fd-70d7-4a29-8e6b-c020c611980a)\"" pod="openshift-multus/multus-9qf7z" podUID="9144d9fd-70d7-4a29-8e6b-c020c611980a"
Oct 06 13:49:48 crc kubenswrapper[4757]: I1006 13:49:48.180327 4757 scope.go:117] "RemoveContainer" containerID="5fee59d9cfb29bcaa00f0f7e454083411d714d0dcdc7f6fb55333dd7c18d4f4b"
Oct 06 13:49:48 crc kubenswrapper[4757]: I1006 13:49:48.582171 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-9qf7z_9144d9fd-70d7-4a29-8e6b-c020c611980a/kube-multus/2.log"
Oct 06 13:49:48 crc kubenswrapper[4757]: I1006 13:49:48.582734 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-9qf7z" event={"ID":"9144d9fd-70d7-4a29-8e6b-c020c611980a","Type":"ContainerStarted","Data":"204c440cd90fa6e90bf009f8f3dd938d757168ee9a7fd1cf67073cd5c65798bd"}
Oct 06 13:49:49 crc kubenswrapper[4757]: I1006 13:49:49.179368 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:49 crc kubenswrapper[4757]: I1006 13:49:49.180312 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:49 crc kubenswrapper[4757]: E1006 13:49:49.214333 4757 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(8010ce7a99a33c211f66b9b0c682ad7994afb4a3c76729c5b86a61deb754e9dc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Oct 06 13:49:49 crc kubenswrapper[4757]: E1006 13:49:49.214413 4757 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(8010ce7a99a33c211f66b9b0c682ad7994afb4a3c76729c5b86a61deb754e9dc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:49 crc kubenswrapper[4757]: E1006 13:49:49.214446 4757 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(8010ce7a99a33c211f66b9b0c682ad7994afb4a3c76729c5b86a61deb754e9dc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:49:49 crc kubenswrapper[4757]: E1006 13:49:49.214534 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-kwk85_crc-storage(d9d9e156-e9b3-454d-88d8-d5b2c9c54512)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-kwk85_crc-storage(d9d9e156-e9b3-454d-88d8-d5b2c9c54512)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-kwk85_crc-storage_d9d9e156-e9b3-454d-88d8-d5b2c9c54512_0(8010ce7a99a33c211f66b9b0c682ad7994afb4a3c76729c5b86a61deb754e9dc): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-kwk85" podUID="d9d9e156-e9b3-454d-88d8-d5b2c9c54512"
Oct 06 13:49:54 crc kubenswrapper[4757]: I1006 13:49:54.583911 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-l46sb"
Oct 06 13:50:03 crc kubenswrapper[4757]: I1006 13:50:03.179647 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:50:03 crc kubenswrapper[4757]: I1006 13:50:03.181262 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:50:03 crc kubenswrapper[4757]: I1006 13:50:03.689836 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kwk85"]
Oct 06 13:50:03 crc kubenswrapper[4757]: I1006 13:50:03.700482 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 06 13:50:04 crc kubenswrapper[4757]: I1006 13:50:04.696323 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kwk85" event={"ID":"d9d9e156-e9b3-454d-88d8-d5b2c9c54512","Type":"ContainerStarted","Data":"158a86d7835b515598ae414ebcd35f59fa274d47cafb2420a0dcb276fe7fbd14"}
Oct 06 13:50:05 crc kubenswrapper[4757]: I1006 13:50:05.706085 4757 generic.go:334] "Generic (PLEG): container finished" podID="d9d9e156-e9b3-454d-88d8-d5b2c9c54512" containerID="1ede0fc18eea69d8072bbcd68d5c54e1392f3047086b2604a03f157eb5999fb6" exitCode=0
Oct 06 13:50:05 crc kubenswrapper[4757]: I1006 13:50:05.706191 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kwk85" event={"ID":"d9d9e156-e9b3-454d-88d8-d5b2c9c54512","Type":"ContainerDied","Data":"1ede0fc18eea69d8072bbcd68d5c54e1392f3047086b2604a03f157eb5999fb6"}
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.011838 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.096805 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbmh8\" (UniqueName: \"kubernetes.io/projected/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-kube-api-access-qbmh8\") pod \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") "
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.096976 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-node-mnt\") pod \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") "
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.097329 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-crc-storage\") pod \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\" (UID: \"d9d9e156-e9b3-454d-88d8-d5b2c9c54512\") "
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.097406 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d9d9e156-e9b3-454d-88d8-d5b2c9c54512" (UID: "d9d9e156-e9b3-454d-88d8-d5b2c9c54512"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.097872 4757 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-node-mnt\") on node \"crc\" DevicePath \"\""
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.106395 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-kube-api-access-qbmh8" (OuterVolumeSpecName: "kube-api-access-qbmh8") pod "d9d9e156-e9b3-454d-88d8-d5b2c9c54512" (UID: "d9d9e156-e9b3-454d-88d8-d5b2c9c54512"). InnerVolumeSpecName "kube-api-access-qbmh8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.125674 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d9d9e156-e9b3-454d-88d8-d5b2c9c54512" (UID: "d9d9e156-e9b3-454d-88d8-d5b2c9c54512"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.199030 4757 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-crc-storage\") on node \"crc\" DevicePath \"\""
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.199082 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbmh8\" (UniqueName: \"kubernetes.io/projected/d9d9e156-e9b3-454d-88d8-d5b2c9c54512-kube-api-access-qbmh8\") on node \"crc\" DevicePath \"\""
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.722348 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kwk85" event={"ID":"d9d9e156-e9b3-454d-88d8-d5b2c9c54512","Type":"ContainerDied","Data":"158a86d7835b515598ae414ebcd35f59fa274d47cafb2420a0dcb276fe7fbd14"}
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.722407 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="158a86d7835b515598ae414ebcd35f59fa274d47cafb2420a0dcb276fe7fbd14"
Oct 06 13:50:07 crc kubenswrapper[4757]: I1006 13:50:07.722461 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kwk85"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.335860 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"]
Oct 06 13:50:15 crc kubenswrapper[4757]: E1006 13:50:15.337116 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9d9e156-e9b3-454d-88d8-d5b2c9c54512" containerName="storage"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.337135 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9d9e156-e9b3-454d-88d8-d5b2c9c54512" containerName="storage"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.337288 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9d9e156-e9b3-454d-88d8-d5b2c9c54512" containerName="storage"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.338398 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.348532 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.393257 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"]
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.434835 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.434906 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.434950 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z25xp\" (UniqueName: \"kubernetes.io/projected/1f413cfe-e073-451c-815c-a246cea1099e-kube-api-access-z25xp\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.536268 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.536341 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.536408 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z25xp\" (UniqueName: \"kubernetes.io/projected/1f413cfe-e073-451c-815c-a246cea1099e-kube-api-access-z25xp\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.537847 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.537861 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.562115 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z25xp\" (UniqueName: \"kubernetes.io/projected/1f413cfe-e073-451c-815c-a246cea1099e-kube-api-access-z25xp\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.681840 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:15 crc kubenswrapper[4757]: I1006 13:50:15.865473 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"]
Oct 06 13:50:16 crc kubenswrapper[4757]: I1006 13:50:16.780346 4757 generic.go:334] "Generic (PLEG): container finished" podID="1f413cfe-e073-451c-815c-a246cea1099e" containerID="7e8ac2ec50bcf66a88b62725a5b49a293456d355300cb63cbdf0c222f5119c8b" exitCode=0
Oct 06 13:50:16 crc kubenswrapper[4757]: I1006 13:50:16.780478 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv" event={"ID":"1f413cfe-e073-451c-815c-a246cea1099e","Type":"ContainerDied","Data":"7e8ac2ec50bcf66a88b62725a5b49a293456d355300cb63cbdf0c222f5119c8b"}
Oct 06 13:50:16 crc kubenswrapper[4757]: I1006 13:50:16.780845 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv" event={"ID":"1f413cfe-e073-451c-815c-a246cea1099e","Type":"ContainerStarted","Data":"689af44f3c4bada29f0b17bc349c28b8ca1e49d7df27aad46579831d14d339df"}
Oct 06 13:50:18 crc kubenswrapper[4757]: I1006 13:50:18.791192 4757 generic.go:334] "Generic (PLEG): container finished" podID="1f413cfe-e073-451c-815c-a246cea1099e" containerID="fdaeb1555669f88eb7efa317c8f2a895a344cc8e54ca39e39dce07d42e479924" exitCode=0
Oct 06 13:50:18 crc kubenswrapper[4757]: I1006 13:50:18.791244 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv" event={"ID":"1f413cfe-e073-451c-815c-a246cea1099e","Type":"ContainerDied","Data":"fdaeb1555669f88eb7efa317c8f2a895a344cc8e54ca39e39dce07d42e479924"}
Oct 06 13:50:19 crc kubenswrapper[4757]: I1006 13:50:19.799655 4757 generic.go:334] "Generic (PLEG): container finished" podID="1f413cfe-e073-451c-815c-a246cea1099e" containerID="75426c0eda91a637b4153d49162836084e3706b164b397df0c1182f1c7a9ab23" exitCode=0
Oct 06 13:50:19 crc kubenswrapper[4757]: I1006 13:50:19.799715 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv" event={"ID":"1f413cfe-e073-451c-815c-a246cea1099e","Type":"ContainerDied","Data":"75426c0eda91a637b4153d49162836084e3706b164b397df0c1182f1c7a9ab23"}
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.048546 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.207399 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z25xp\" (UniqueName: \"kubernetes.io/projected/1f413cfe-e073-451c-815c-a246cea1099e-kube-api-access-z25xp\") pod \"1f413cfe-e073-451c-815c-a246cea1099e\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") "
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.207596 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-bundle\") pod \"1f413cfe-e073-451c-815c-a246cea1099e\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") "
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.207702 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-util\") pod \"1f413cfe-e073-451c-815c-a246cea1099e\" (UID: \"1f413cfe-e073-451c-815c-a246cea1099e\") "
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.208739 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-bundle" (OuterVolumeSpecName: "bundle") pod "1f413cfe-e073-451c-815c-a246cea1099e" (UID: "1f413cfe-e073-451c-815c-a246cea1099e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.213691 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f413cfe-e073-451c-815c-a246cea1099e-kube-api-access-z25xp" (OuterVolumeSpecName: "kube-api-access-z25xp") pod "1f413cfe-e073-451c-815c-a246cea1099e" (UID: "1f413cfe-e073-451c-815c-a246cea1099e"). InnerVolumeSpecName "kube-api-access-z25xp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.221496 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-util" (OuterVolumeSpecName: "util") pod "1f413cfe-e073-451c-815c-a246cea1099e" (UID: "1f413cfe-e073-451c-815c-a246cea1099e"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.315260 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z25xp\" (UniqueName: \"kubernetes.io/projected/1f413cfe-e073-451c-815c-a246cea1099e-kube-api-access-z25xp\") on node \"crc\" DevicePath \"\""
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.315328 4757 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.315361 4757 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1f413cfe-e073-451c-815c-a246cea1099e-util\") on node \"crc\" DevicePath \"\""
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.815295 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv" event={"ID":"1f413cfe-e073-451c-815c-a246cea1099e","Type":"ContainerDied","Data":"689af44f3c4bada29f0b17bc349c28b8ca1e49d7df27aad46579831d14d339df"}
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.815342 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="689af44f3c4bada29f0b17bc349c28b8ca1e49d7df27aad46579831d14d339df"
Oct 06 13:50:21 crc kubenswrapper[4757]: I1006 13:50:21.815375 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.180486 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-skdq9"]
Oct 06 13:50:23 crc kubenswrapper[4757]: E1006 13:50:23.181131 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f413cfe-e073-451c-815c-a246cea1099e" containerName="extract"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.181147 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f413cfe-e073-451c-815c-a246cea1099e" containerName="extract"
Oct 06 13:50:23 crc kubenswrapper[4757]: E1006 13:50:23.181171 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f413cfe-e073-451c-815c-a246cea1099e" containerName="util"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.181178 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f413cfe-e073-451c-815c-a246cea1099e" containerName="util"
Oct 06 13:50:23 crc kubenswrapper[4757]: E1006 13:50:23.181186 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f413cfe-e073-451c-815c-a246cea1099e" containerName="pull"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.181194 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f413cfe-e073-451c-815c-a246cea1099e" containerName="pull"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.181302 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f413cfe-e073-451c-815c-a246cea1099e" containerName="extract"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.181736 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-skdq9"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.184231 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.184426 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.184712 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-9mln2"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.191705 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-skdq9"]
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.338200 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdxpb\" (UniqueName: \"kubernetes.io/projected/28c42384-fa75-491c-a8a8-1b66cacf2c04-kube-api-access-kdxpb\") pod \"nmstate-operator-858ddd8f98-skdq9\" (UID: \"28c42384-fa75-491c-a8a8-1b66cacf2c04\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-skdq9"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.439529 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdxpb\" (UniqueName: \"kubernetes.io/projected/28c42384-fa75-491c-a8a8-1b66cacf2c04-kube-api-access-kdxpb\") pod \"nmstate-operator-858ddd8f98-skdq9\" (UID: \"28c42384-fa75-491c-a8a8-1b66cacf2c04\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-skdq9"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.464710 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdxpb\" (UniqueName: \"kubernetes.io/projected/28c42384-fa75-491c-a8a8-1b66cacf2c04-kube-api-access-kdxpb\") pod \"nmstate-operator-858ddd8f98-skdq9\" (UID: \"28c42384-fa75-491c-a8a8-1b66cacf2c04\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-skdq9"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.496636 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-skdq9"
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.690154 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-skdq9"]
Oct 06 13:50:23 crc kubenswrapper[4757]: W1006 13:50:23.702344 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28c42384_fa75_491c_a8a8_1b66cacf2c04.slice/crio-52b7685028be1152ce58db39ea910b020caf4ad93e0ff1760378e2233e3668de WatchSource:0}: Error finding container 52b7685028be1152ce58db39ea910b020caf4ad93e0ff1760378e2233e3668de: Status 404 returned error can't find the container with id 52b7685028be1152ce58db39ea910b020caf4ad93e0ff1760378e2233e3668de
Oct 06 13:50:23 crc kubenswrapper[4757]: I1006 13:50:23.827364 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-skdq9" event={"ID":"28c42384-fa75-491c-a8a8-1b66cacf2c04","Type":"ContainerStarted","Data":"52b7685028be1152ce58db39ea910b020caf4ad93e0ff1760378e2233e3668de"}
Oct 06 13:50:25 crc kubenswrapper[4757]: I1006 13:50:25.843882 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-skdq9" event={"ID":"28c42384-fa75-491c-a8a8-1b66cacf2c04","Type":"ContainerStarted","Data":"7299df1730c82c5fb55e0b54124c7f449da9399d0acbeb137ed933aca94d377d"}
Oct 06 13:50:25 crc kubenswrapper[4757]: I1006 13:50:25.859136 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-skdq9" podStartSLOduration=1.077175941 podStartE2EDuration="2.859115129s" podCreationTimestamp="2025-10-06 13:50:23 +0000 UTC" firstStartedPulling="2025-10-06 13:50:23.704366424 +0000 UTC m=+712.201684961" lastFinishedPulling="2025-10-06 13:50:25.486305622 +0000 UTC m=+713.983624149" observedRunningTime="2025-10-06 13:50:25.856138492 +0000 UTC m=+714.353457039" watchObservedRunningTime="2025-10-06 13:50:25.859115129 +0000 UTC m=+714.356433676"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.743180 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6"]
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.744518 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.747123 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-njh2g"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.756461 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"]
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.757500 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.760667 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.763291 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6"]
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.776303 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8z46\" (UniqueName: \"kubernetes.io/projected/eb37b835-d99b-4893-a0a2-659afa1391e4-kube-api-access-s8z46\") pod \"nmstate-webhook-6cdbc54649-db26x\" (UID: \"eb37b835-d99b-4893-a0a2-659afa1391e4\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.776726 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/eb37b835-d99b-4893-a0a2-659afa1391e4-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-db26x\" (UID: \"eb37b835-d99b-4893-a0a2-659afa1391e4\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.776817 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44wx7\" (UniqueName: \"kubernetes.io/projected/54800f67-40f0-4ea1-bc26-76baed5d2663-kube-api-access-44wx7\") pod \"nmstate-metrics-fdff9cb8d-lhkl6\" (UID: \"54800f67-40f0-4ea1-bc26-76baed5d2663\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.780774 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-xw9cw"]
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.781677 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.790525 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"]
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.867619 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"]
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.868299 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.870904 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-wkmgc"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.871218 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.871385 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.877033 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"]
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878111 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnvvp\" (UniqueName: \"kubernetes.io/projected/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-kube-api-access-nnvvp\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878256 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8z46\" (UniqueName: \"kubernetes.io/projected/eb37b835-d99b-4893-a0a2-659afa1391e4-kube-api-access-s8z46\") pod \"nmstate-webhook-6cdbc54649-db26x\" (UID: \"eb37b835-d99b-4893-a0a2-659afa1391e4\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878342 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/eb37b835-d99b-4893-a0a2-659afa1391e4-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-db26x\" (UID: \"eb37b835-d99b-4893-a0a2-659afa1391e4\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878416 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55bsp\" (UniqueName: \"kubernetes.io/projected/d715a33c-ce03-4762-8091-b73d2b53f929-kube-api-access-55bsp\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878483 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878602 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878647 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/d715a33c-ce03-4762-8091-b73d2b53f929-ovs-socket\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878665 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/d715a33c-ce03-4762-8091-b73d2b53f929-nmstate-lock\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878711 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/d715a33c-ce03-4762-8091-b73d2b53f929-dbus-socket\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.878769 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44wx7\" (UniqueName: \"kubernetes.io/projected/54800f67-40f0-4ea1-bc26-76baed5d2663-kube-api-access-44wx7\") pod \"nmstate-metrics-fdff9cb8d-lhkl6\" (UID: \"54800f67-40f0-4ea1-bc26-76baed5d2663\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6"
Oct 06 13:50:26 crc kubenswrapper[4757]: E1006 13:50:26.878847 4757 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found
Oct 06 13:50:26 crc kubenswrapper[4757]: E1006 13:50:26.878949 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb37b835-d99b-4893-a0a2-659afa1391e4-tls-key-pair podName:eb37b835-d99b-4893-a0a2-659afa1391e4 nodeName:}" failed. No retries permitted until 2025-10-06 13:50:27.378932087 +0000 UTC m=+715.876250624 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/eb37b835-d99b-4893-a0a2-659afa1391e4-tls-key-pair") pod "nmstate-webhook-6cdbc54649-db26x" (UID: "eb37b835-d99b-4893-a0a2-659afa1391e4") : secret "openshift-nmstate-webhook" not found
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.907586 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8z46\" (UniqueName: \"kubernetes.io/projected/eb37b835-d99b-4893-a0a2-659afa1391e4-kube-api-access-s8z46\") pod \"nmstate-webhook-6cdbc54649-db26x\" (UID: \"eb37b835-d99b-4893-a0a2-659afa1391e4\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.925451 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44wx7\" (UniqueName: \"kubernetes.io/projected/54800f67-40f0-4ea1-bc26-76baed5d2663-kube-api-access-44wx7\") pod \"nmstate-metrics-fdff9cb8d-lhkl6\" (UID: \"54800f67-40f0-4ea1-bc26-76baed5d2663\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.979524 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55bsp\" (UniqueName: \"kubernetes.io/projected/d715a33c-ce03-4762-8091-b73d2b53f929-kube-api-access-55bsp\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.979780 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.979850 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.979889 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/d715a33c-ce03-4762-8091-b73d2b53f929-nmstate-lock\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.979915 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/d715a33c-ce03-4762-8091-b73d2b53f929-ovs-socket\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.979946 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/d715a33c-ce03-4762-8091-b73d2b53f929-dbus-socket\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.979973 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/d715a33c-ce03-4762-8091-b73d2b53f929-nmstate-lock\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: E1006 13:50:26.979999 4757 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.980009 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/d715a33c-ce03-4762-8091-b73d2b53f929-ovs-socket\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: E1006 13:50:26.980056 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-plugin-serving-cert podName:c6cb2b63-6ae0-4d31-8b00-de6ffff21df7 nodeName:}" failed. No retries permitted until 2025-10-06 13:50:27.48003689 +0000 UTC m=+715.977355537 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-plugin-serving-cert") pod "nmstate-console-plugin-6b874cbd85-ccdp9" (UID: "c6cb2b63-6ae0-4d31-8b00-de6ffff21df7") : secret "plugin-serving-cert" not found
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.980077 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnvvp\" (UniqueName: \"kubernetes.io/projected/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-kube-api-access-nnvvp\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.980326 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/d715a33c-ce03-4762-8091-b73d2b53f929-dbus-socket\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:26 crc kubenswrapper[4757]: I1006 13:50:26.980892 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.006230 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnvvp\" (UniqueName: \"kubernetes.io/projected/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-kube-api-access-nnvvp\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.008236 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55bsp\" (UniqueName: \"kubernetes.io/projected/d715a33c-ce03-4762-8091-b73d2b53f929-kube-api-access-55bsp\") pod \"nmstate-handler-xw9cw\" (UID: \"d715a33c-ce03-4762-8091-b73d2b53f929\") " pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.063541 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.078038 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7c96666cff-2f9sl"]
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.078848 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.095913 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-xw9cw"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.122606 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7c96666cff-2f9sl"]
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.181866 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9z4f\" (UniqueName: \"kubernetes.io/projected/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-kube-api-access-f9z4f\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.181920 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-console-serving-cert\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.181943 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-console-oauth-config\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.182011 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-console-config\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.182036 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-trusted-ca-bundle\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.182061 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-oauth-serving-cert\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.182078 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-service-ca\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.282869 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-oauth-serving-cert\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.283260 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-service-ca\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.283294 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9z4f\" (UniqueName: \"kubernetes.io/projected/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-kube-api-access-f9z4f\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.283314 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-console-serving-cert\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.283335 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-console-oauth-config\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.283395 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-console-config\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.283417 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-trusted-ca-bundle\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.283946 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-oauth-serving-cert\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.284629 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-trusted-ca-bundle\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.284942 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-service-ca\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.287588 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-console-serving-cert\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.287701 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-console-oauth-config\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.287798 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-console-config\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.314201 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9z4f\" (UniqueName: \"kubernetes.io/projected/f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd-kube-api-access-f9z4f\") pod \"console-7c96666cff-2f9sl\" (UID: \"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd\") " pod="openshift-console/console-7c96666cff-2f9sl"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.368651 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6"]
Oct 06 13:50:27 crc kubenswrapper[4757]: W1006 13:50:27.384452 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54800f67_40f0_4ea1_bc26_76baed5d2663.slice/crio-c04c1ac4c06a2ca68c1f90fcb8758a1af129e8cfa433a695d245b51938da0cc1 WatchSource:0}: Error finding container c04c1ac4c06a2ca68c1f90fcb8758a1af129e8cfa433a695d245b51938da0cc1: Status 404 returned error can't find the container with id c04c1ac4c06a2ca68c1f90fcb8758a1af129e8cfa433a695d245b51938da0cc1
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.384821 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/eb37b835-d99b-4893-a0a2-659afa1391e4-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-db26x\" (UID: \"eb37b835-d99b-4893-a0a2-659afa1391e4\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"
Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.389199 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/eb37b835-d99b-4893-a0a2-659afa1391e4-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-db26x\"
(UID: \"eb37b835-d99b-4893-a0a2-659afa1391e4\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x" Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.426590 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7c96666cff-2f9sl" Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.486499 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9" Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.490115 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6cb2b63-6ae0-4d31-8b00-de6ffff21df7-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ccdp9\" (UID: \"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9" Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.682486 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x" Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.780487 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9" Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.833727 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7c96666cff-2f9sl"] Oct 06 13:50:27 crc kubenswrapper[4757]: W1006 13:50:27.847198 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf12ca4f5_7bfa_4ca8_b4eb_8cad0f6c14bd.slice/crio-ff66c3967a6271ea34365c45aa1135e6e76ed965f19affb4351cd664e2139261 WatchSource:0}: Error finding container ff66c3967a6271ea34365c45aa1135e6e76ed965f19affb4351cd664e2139261: Status 404 returned error can't find the container with id ff66c3967a6271ea34365c45aa1135e6e76ed965f19affb4351cd664e2139261 Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.858548 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7c96666cff-2f9sl" event={"ID":"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd","Type":"ContainerStarted","Data":"ff66c3967a6271ea34365c45aa1135e6e76ed965f19affb4351cd664e2139261"} Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.862110 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xw9cw" event={"ID":"d715a33c-ce03-4762-8091-b73d2b53f929","Type":"ContainerStarted","Data":"baaabc26c7a8a48418f5c80194294f63c6c3a4026db6883e36cdce409f06fc21"} Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.865258 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6" event={"ID":"54800f67-40f0-4ea1-bc26-76baed5d2663","Type":"ContainerStarted","Data":"c04c1ac4c06a2ca68c1f90fcb8758a1af129e8cfa433a695d245b51938da0cc1"} Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.888197 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-db26x"] Oct 06 13:50:27 crc kubenswrapper[4757]: W1006 13:50:27.905600 4757 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb37b835_d99b_4893_a0a2_659afa1391e4.slice/crio-cf4b12c64aa18685875bb9f97b43f96494a3f08e29fc065b2b71cbe65b68fd43 WatchSource:0}: Error finding container cf4b12c64aa18685875bb9f97b43f96494a3f08e29fc065b2b71cbe65b68fd43: Status 404 returned error can't find the container with id cf4b12c64aa18685875bb9f97b43f96494a3f08e29fc065b2b71cbe65b68fd43 Oct 06 13:50:27 crc kubenswrapper[4757]: I1006 13:50:27.960697 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9"] Oct 06 13:50:27 crc kubenswrapper[4757]: W1006 13:50:27.972127 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6cb2b63_6ae0_4d31_8b00_de6ffff21df7.slice/crio-7dfcf80277cb890861053786dd1d7ccac68ce0ed2a5147e40775963fcf055b1d WatchSource:0}: Error finding container 7dfcf80277cb890861053786dd1d7ccac68ce0ed2a5147e40775963fcf055b1d: Status 404 returned error can't find the container with id 7dfcf80277cb890861053786dd1d7ccac68ce0ed2a5147e40775963fcf055b1d Oct 06 13:50:28 crc kubenswrapper[4757]: I1006 13:50:28.873616 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7c96666cff-2f9sl" event={"ID":"f12ca4f5-7bfa-4ca8-b4eb-8cad0f6c14bd","Type":"ContainerStarted","Data":"357e60f910ea0ff7f80b15c87c0eab5be6c722e7b02d8c2e2eac45db2ec756df"} Oct 06 13:50:28 crc kubenswrapper[4757]: I1006 13:50:28.876691 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x" event={"ID":"eb37b835-d99b-4893-a0a2-659afa1391e4","Type":"ContainerStarted","Data":"cf4b12c64aa18685875bb9f97b43f96494a3f08e29fc065b2b71cbe65b68fd43"} Oct 06 13:50:28 crc kubenswrapper[4757]: I1006 13:50:28.878052 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9" event={"ID":"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7","Type":"ContainerStarted","Data":"7dfcf80277cb890861053786dd1d7ccac68ce0ed2a5147e40775963fcf055b1d"} Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.893680 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-xw9cw" event={"ID":"d715a33c-ce03-4762-8091-b73d2b53f929","Type":"ContainerStarted","Data":"c346ef6e2a22548557ca59b01f0e997632cfbc9ddc5975f05e0ba43aaff30273"} Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.894276 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-xw9cw" Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.895968 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x" event={"ID":"eb37b835-d99b-4893-a0a2-659afa1391e4","Type":"ContainerStarted","Data":"b9b6a9b5eda03887f0434e08add81c2f5597f09808a8671d1e38e7b54e46e023"} Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.896379 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x" Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.897971 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6" event={"ID":"54800f67-40f0-4ea1-bc26-76baed5d2663","Type":"ContainerStarted","Data":"4cb5f7b6a926d7c5bba05b6efbbb50bdfa82dde83f210cc5c368bf39c8b96344"} Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.899442 4757 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9" event={"ID":"c6cb2b63-6ae0-4d31-8b00-de6ffff21df7","Type":"ContainerStarted","Data":"954d2272c0851f1dfc773134196e4b897750b9e3a7fb8522b51aac97be45ea4a"} Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.914864 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-xw9cw" podStartSLOduration=1.979552529 podStartE2EDuration="4.914840271s" podCreationTimestamp="2025-10-06 13:50:26 +0000 UTC" firstStartedPulling="2025-10-06 13:50:27.127273002 +0000 UTC m=+715.624591539" lastFinishedPulling="2025-10-06 13:50:30.062560744 +0000 UTC m=+718.559879281" observedRunningTime="2025-10-06 13:50:30.910372446 +0000 UTC m=+719.407691013" watchObservedRunningTime="2025-10-06 13:50:30.914840271 +0000 UTC m=+719.412158818" Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.921716 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7c96666cff-2f9sl" podStartSLOduration=3.921689863 podStartE2EDuration="3.921689863s" podCreationTimestamp="2025-10-06 13:50:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:50:28.893471108 +0000 UTC m=+717.390789655" watchObservedRunningTime="2025-10-06 13:50:30.921689863 +0000 UTC m=+719.419008430" Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.926526 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ccdp9" podStartSLOduration=2.842654057 podStartE2EDuration="4.92650981s" podCreationTimestamp="2025-10-06 13:50:26 +0000 UTC" firstStartedPulling="2025-10-06 13:50:27.977337107 +0000 UTC m=+716.474655644" lastFinishedPulling="2025-10-06 13:50:30.06119286 +0000 UTC m=+718.558511397" observedRunningTime="2025-10-06 13:50:30.925030842 +0000 UTC m=+719.422349409" watchObservedRunningTime="2025-10-06 13:50:30.92650981 +0000 UTC m=+719.423828357" Oct 06 13:50:30 crc kubenswrapper[4757]: I1006 13:50:30.956662 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x" podStartSLOduration=2.787957842 podStartE2EDuration="4.956635659s" podCreationTimestamp="2025-10-06 13:50:26 +0000 UTC" firstStartedPulling="2025-10-06 13:50:27.908239293 +0000 UTC m=+716.405557830" lastFinishedPulling="2025-10-06 13:50:30.07691711 +0000 UTC m=+718.574235647" observedRunningTime="2025-10-06 13:50:30.949070662 +0000 UTC m=+719.446389199" watchObservedRunningTime="2025-10-06 13:50:30.956635659 +0000 UTC m=+719.453954206" Oct 06 13:50:32 crc kubenswrapper[4757]: I1006 13:50:32.917703 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6" event={"ID":"54800f67-40f0-4ea1-bc26-76baed5d2663","Type":"ContainerStarted","Data":"92170f2e48daef3678ad1915970ec8a56b78c01b9da7764a86d4c8103704f209"} Oct 06 13:50:32 crc kubenswrapper[4757]: I1006 13:50:32.935527 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-lhkl6" podStartSLOduration=1.706027486 podStartE2EDuration="6.935496321s" podCreationTimestamp="2025-10-06 13:50:26 +0000 UTC" firstStartedPulling="2025-10-06 13:50:27.385936011 +0000 UTC m=+715.883254548" lastFinishedPulling="2025-10-06 13:50:32.615404856 +0000 UTC m=+721.112723383" observedRunningTime="2025-10-06 13:50:32.933817747 +0000 UTC 
m=+721.431136364" watchObservedRunningTime="2025-10-06 13:50:32.935496321 +0000 UTC m=+721.432814898" Oct 06 13:50:34 crc kubenswrapper[4757]: I1006 13:50:34.361003 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:50:34 crc kubenswrapper[4757]: I1006 13:50:34.361461 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:50:37 crc kubenswrapper[4757]: I1006 13:50:37.126854 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-xw9cw" Oct 06 13:50:37 crc kubenswrapper[4757]: I1006 13:50:37.427717 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7c96666cff-2f9sl" Oct 06 13:50:37 crc kubenswrapper[4757]: I1006 13:50:37.428369 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7c96666cff-2f9sl" Oct 06 13:50:37 crc kubenswrapper[4757]: I1006 13:50:37.437724 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7c96666cff-2f9sl" Oct 06 13:50:37 crc kubenswrapper[4757]: I1006 13:50:37.953731 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7c96666cff-2f9sl" Oct 06 13:50:38 crc kubenswrapper[4757]: I1006 13:50:38.014375 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xbn64"] Oct 06 13:50:47 crc kubenswrapper[4757]: I1006 13:50:47.690431 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-db26x" Oct 06 13:50:59 crc kubenswrapper[4757]: I1006 13:50:59.619993 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ssqcq"] Oct 06 13:50:59 crc kubenswrapper[4757]: I1006 13:50:59.620930 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" podUID="3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" containerName="controller-manager" containerID="cri-o://376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d" gracePeriod=30 Oct 06 13:50:59 crc kubenswrapper[4757]: I1006 13:50:59.720906 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"] Oct 06 13:50:59 crc kubenswrapper[4757]: I1006 13:50:59.721289 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" podUID="74caa6de-7695-4dcb-9daf-f3368905de1c" containerName="route-controller-manager" containerID="cri-o://b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa" gracePeriod=30 Oct 06 13:50:59 crc kubenswrapper[4757]: I1006 13:50:59.986592 4757 util.go:48] "No ready sandbox for pod can be found. 
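
[Annotation] The machine-config-daemon liveness failure above ("Get http://127.0.0.1:8798/health ... connection refused") is an ordinary HTTP probe miss: any transport error or non-2xx status counts as a failed probe. A minimal sketch of that check in Go, assuming a hypothetical /health endpoint and timeout; this is the shape of the check, not the kubelet's prober implementation.

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probeHTTP performs a single liveness-style HTTP check.
    // A transport error (e.g. "connection refused") or a non-2xx
    // status counts as failure, matching the log lines above.
    func probeHTTP(url string, timeout time.Duration) error {
        client := &http.Client{Timeout: timeout}
        resp, err := client.Get(url)
        if err != nil {
            // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
            return fmt.Errorf("probe failed: %w", err)
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 300 {
            return fmt.Errorf("probe failed: status %d", resp.StatusCode)
        }
        return nil
    }

    func main() {
        if err := probeHTTP("http://127.0.0.1:8798/health", time.Second); err != nil {
            fmt.Println(err)
        }
    }
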
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.068914 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.089000 4757 generic.go:334] "Generic (PLEG): container finished" podID="3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" containerID="376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d" exitCode=0 Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.089137 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.089537 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" event={"ID":"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9","Type":"ContainerDied","Data":"376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d"} Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.089562 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-ssqcq" event={"ID":"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9","Type":"ContainerDied","Data":"fd13a70234db778f77fab891672d2261960df715fb7f8f6893ee58e865d76e02"} Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.089577 4757 scope.go:117] "RemoveContainer" containerID="376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.095308 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-serving-cert\") pod \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.095799 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-client-ca\") pod \"74caa6de-7695-4dcb-9daf-f3368905de1c\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.095902 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-proxy-ca-bundles\") pod \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.095970 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-config\") pod \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.096073 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74caa6de-7695-4dcb-9daf-f3368905de1c-serving-cert\") pod \"74caa6de-7695-4dcb-9daf-f3368905de1c\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.096165 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-config\") pod \"74caa6de-7695-4dcb-9daf-f3368905de1c\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.096229 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7kbs\" (UniqueName: \"kubernetes.io/projected/74caa6de-7695-4dcb-9daf-f3368905de1c-kube-api-access-p7kbs\") pod \"74caa6de-7695-4dcb-9daf-f3368905de1c\" (UID: \"74caa6de-7695-4dcb-9daf-f3368905de1c\") " Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.096398 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n28tn\" (UniqueName: \"kubernetes.io/projected/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-kube-api-access-n28tn\") pod \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.096468 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-client-ca\") pod \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\" (UID: \"3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9\") " Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.096224 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.096778 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-client-ca" (OuterVolumeSpecName: "client-ca") pod "74caa6de-7695-4dcb-9daf-f3368905de1c" (UID: "74caa6de-7695-4dcb-9daf-f3368905de1c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.096905 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-config" (OuterVolumeSpecName: "config") pod "74caa6de-7695-4dcb-9daf-f3368905de1c" (UID: "74caa6de-7695-4dcb-9daf-f3368905de1c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.097134 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-client-ca" (OuterVolumeSpecName: "client-ca") pod "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" (UID: "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.097297 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" (UID: "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.097360 4757 generic.go:334] "Generic (PLEG): container finished" podID="74caa6de-7695-4dcb-9daf-f3368905de1c" containerID="b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa" exitCode=0 Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.097450 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" event={"ID":"74caa6de-7695-4dcb-9daf-f3368905de1c","Type":"ContainerDied","Data":"b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa"} Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.097531 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr" event={"ID":"74caa6de-7695-4dcb-9daf-f3368905de1c","Type":"ContainerDied","Data":"805278454ff71deb2bd4d2d2c9d777de1c28e22dee86d0b93a7562ebf5b726d7"} Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.097692 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-config" (OuterVolumeSpecName: "config") pod "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" (UID: "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.102511 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" (UID: "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.102517 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74caa6de-7695-4dcb-9daf-f3368905de1c-kube-api-access-p7kbs" (OuterVolumeSpecName: "kube-api-access-p7kbs") pod "74caa6de-7695-4dcb-9daf-f3368905de1c" (UID: "74caa6de-7695-4dcb-9daf-f3368905de1c"). InnerVolumeSpecName "kube-api-access-p7kbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.102872 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-kube-api-access-n28tn" (OuterVolumeSpecName: "kube-api-access-n28tn") pod "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" (UID: "3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9"). InnerVolumeSpecName "kube-api-access-n28tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.103245 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74caa6de-7695-4dcb-9daf-f3368905de1c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "74caa6de-7695-4dcb-9daf-f3368905de1c" (UID: "74caa6de-7695-4dcb-9daf-f3368905de1c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.120275 4757 scope.go:117] "RemoveContainer" containerID="376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d" Oct 06 13:51:00 crc kubenswrapper[4757]: E1006 13:51:00.120815 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d\": container with ID starting with 376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d not found: ID does not exist" containerID="376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.120862 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d"} err="failed to get container status \"376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d\": rpc error: code = NotFound desc = could not find container \"376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d\": container with ID starting with 376643414d8cc06a7fe6f7258b4e3e6059a91369dca5d6165feac373605c482d not found: ID does not exist" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.120890 4757 scope.go:117] "RemoveContainer" containerID="b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.138571 4757 scope.go:117] "RemoveContainer" containerID="b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa" Oct 06 13:51:00 crc kubenswrapper[4757]: E1006 13:51:00.139600 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa\": container with ID starting with b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa not found: ID does not exist" containerID="b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.139644 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa"} err="failed to get container status \"b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa\": rpc error: code = NotFound desc = could not find container \"b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa\": container with ID starting with b6e369a0187661a348e078177f73131979eee2904487669e5986defb4251d5fa not found: ID does not exist" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.197960 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.197982 4757 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-client-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.197991 4757 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 
13:51:00.198001 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.198010 4757 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74caa6de-7695-4dcb-9daf-f3368905de1c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.198018 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74caa6de-7695-4dcb-9daf-f3368905de1c-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.198027 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7kbs\" (UniqueName: \"kubernetes.io/projected/74caa6de-7695-4dcb-9daf-f3368905de1c-kube-api-access-p7kbs\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.198037 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n28tn\" (UniqueName: \"kubernetes.io/projected/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-kube-api-access-n28tn\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.198044 4757 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.412196 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ssqcq"] Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.414908 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-ssqcq"] Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.422445 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"] Oct 06 13:51:00 crc kubenswrapper[4757]: I1006 13:51:00.429560 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-np4zr"] Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.375044 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6"] Oct 06 13:51:01 crc kubenswrapper[4757]: E1006 13:51:01.375384 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" containerName="controller-manager" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.375403 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" containerName="controller-manager" Oct 06 13:51:01 crc kubenswrapper[4757]: E1006 13:51:01.375413 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74caa6de-7695-4dcb-9daf-f3368905de1c" containerName="route-controller-manager" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.375421 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="74caa6de-7695-4dcb-9daf-f3368905de1c" containerName="route-controller-manager" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.375562 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="74caa6de-7695-4dcb-9daf-f3368905de1c" containerName="route-controller-manager" Oct 
06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.375576 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" containerName="controller-manager" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.376111 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.379540 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.379776 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.380048 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.380264 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.380935 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.381087 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.381710 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj"] Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.382529 4757 util.go:30] "No sandbox for pod can be found. 
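
[Annotation] The reflector.go "Caches populated for *v1.ConfigMap / *v1.Secret" lines above (and continuing below for the openshift-controller-manager namespace) mark client-go informer caches syncing before the replacement pods start. A minimal warm-up sketch using the public client-go informer API, assuming in-cluster credentials; the namespace and 10-minute resync interval are arbitrary choices, not values taken from this cluster.

    package main

    import (
        "time"

        "k8s.io/client-go/informers"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/rest"
        "k8s.io/client-go/tools/cache"
    )

    func main() {
        cfg, err := rest.InClusterConfig() // assumes running inside a pod
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)

        // Factory scoped to one namespace, mirroring the per-namespace
        // reflectors in the log.
        factory := informers.NewSharedInformerFactoryWithOptions(
            client, 10*time.Minute,
            informers.WithNamespace("openshift-route-controller-manager"),
        )
        cmInformer := factory.Core().V1().ConfigMaps().Informer()

        stop := make(chan struct{})
        defer close(stop)
        factory.Start(stop)

        // Block until the local cache is populated, the moment the
        // "Caches populated" lines record.
        if !cache.WaitForCacheSync(stop, cmInformer.HasSynced) {
            panic("cache never synced")
        }
    }
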
Need to start a new one" pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.384242 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.384528 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.384924 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.385253 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.385965 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.386153 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.391583 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6"] Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.394825 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.395785 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj"] Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.414631 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d451ba92-c4ce-400e-a817-5a8cec2e845f-client-ca\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.414674 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d451ba92-c4ce-400e-a817-5a8cec2e845f-config\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.414693 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9cch\" (UniqueName: \"kubernetes.io/projected/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-kube-api-access-b9cch\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.414712 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9gv7\" (UniqueName: \"kubernetes.io/projected/d451ba92-c4ce-400e-a817-5a8cec2e845f-kube-api-access-w9gv7\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " 
pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.414730 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d451ba92-c4ce-400e-a817-5a8cec2e845f-serving-cert\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.414770 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-config\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.414791 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-client-ca\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.414831 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-proxy-ca-bundles\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.414853 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-serving-cert\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.515973 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-config\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.516904 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-client-ca\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.517125 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-proxy-ca-bundles\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: 
I1006 13:51:01.517301 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-serving-cert\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.517453 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d451ba92-c4ce-400e-a817-5a8cec2e845f-config\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.517571 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d451ba92-c4ce-400e-a817-5a8cec2e845f-client-ca\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.518656 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9cch\" (UniqueName: \"kubernetes.io/projected/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-kube-api-access-b9cch\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.518803 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9gv7\" (UniqueName: \"kubernetes.io/projected/d451ba92-c4ce-400e-a817-5a8cec2e845f-kube-api-access-w9gv7\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.518898 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d451ba92-c4ce-400e-a817-5a8cec2e845f-serving-cert\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.518335 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-proxy-ca-bundles\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.517831 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-client-ca\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.518606 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/d451ba92-c4ce-400e-a817-5a8cec2e845f-client-ca\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.520450 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-config\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.522907 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d451ba92-c4ce-400e-a817-5a8cec2e845f-config\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.524249 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d451ba92-c4ce-400e-a817-5a8cec2e845f-serving-cert\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.526469 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-serving-cert\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.542805 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9gv7\" (UniqueName: \"kubernetes.io/projected/d451ba92-c4ce-400e-a817-5a8cec2e845f-kube-api-access-w9gv7\") pod \"route-controller-manager-64cb69c7df-lgkd6\" (UID: \"d451ba92-c4ce-400e-a817-5a8cec2e845f\") " pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.547356 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9cch\" (UniqueName: \"kubernetes.io/projected/36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f-kube-api-access-b9cch\") pod \"controller-manager-86bfdbbdcd-8dbxj\" (UID: \"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f\") " pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.696945 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.704790 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:01 crc kubenswrapper[4757]: I1006 13:51:01.963370 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj"] Oct 06 13:51:02 crc kubenswrapper[4757]: I1006 13:51:02.112669 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" event={"ID":"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f","Type":"ContainerStarted","Data":"99fc8a7d3407d5ca3ddea84808e4482e35020b78abe3f2cfd06eddc336e12236"} Oct 06 13:51:02 crc kubenswrapper[4757]: I1006 13:51:02.189795 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9" path="/var/lib/kubelet/pods/3d4a2fb3-0d5a-4be4-afe7-acc6e04917f9/volumes" Oct 06 13:51:02 crc kubenswrapper[4757]: I1006 13:51:02.190548 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74caa6de-7695-4dcb-9daf-f3368905de1c" path="/var/lib/kubelet/pods/74caa6de-7695-4dcb-9daf-f3368905de1c/volumes" Oct 06 13:51:02 crc kubenswrapper[4757]: I1006 13:51:02.205084 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6"] Oct 06 13:51:02 crc kubenswrapper[4757]: W1006 13:51:02.208708 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd451ba92_c4ce_400e_a817_5a8cec2e845f.slice/crio-a03971d02c07a4639156426737e16374d59c71b78d9f4468de5774d31cdab846 WatchSource:0}: Error finding container a03971d02c07a4639156426737e16374d59c71b78d9f4468de5774d31cdab846: Status 404 returned error can't find the container with id a03971d02c07a4639156426737e16374d59c71b78d9f4468de5774d31cdab846 Oct 06 13:51:02 crc kubenswrapper[4757]: I1006 13:51:02.897181 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl"] Oct 06 13:51:02 crc kubenswrapper[4757]: I1006 13:51:02.898523 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:02 crc kubenswrapper[4757]: I1006 13:51:02.900010 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 06 13:51:02 crc kubenswrapper[4757]: I1006 13:51:02.907958 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl"] Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.041161 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p58j\" (UniqueName: \"kubernetes.io/projected/e2bebc59-e950-44d3-9b34-e9ae757735ed-kube-api-access-7p58j\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.041224 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.041251 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.069676 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-xbn64" podUID="0345b748-8161-40b5-bec8-0c36c2d87ea3" containerName="console" containerID="cri-o://0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554" gracePeriod=15 Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.131802 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" event={"ID":"36b2a9d5-aca1-4aee-a696-eb0cdfe87b7f","Type":"ContainerStarted","Data":"5fe6262dfdb7deb147cec4e75fd9f741fd05b821352eb63679feb35e426fcd05"} Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.133392 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.139401 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" event={"ID":"d451ba92-c4ce-400e-a817-5a8cec2e845f","Type":"ContainerStarted","Data":"8be4e39b34f9066480fc9d3130e4e31bcba8f3845e66337fa142275dc3236ca9"} Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.139444 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" 
event={"ID":"d451ba92-c4ce-400e-a817-5a8cec2e845f","Type":"ContainerStarted","Data":"a03971d02c07a4639156426737e16374d59c71b78d9f4468de5774d31cdab846"} Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.139592 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.139742 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.142069 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.142176 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.142335 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p58j\" (UniqueName: \"kubernetes.io/projected/e2bebc59-e950-44d3-9b34-e9ae757735ed-kube-api-access-7p58j\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.142751 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.142787 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.145775 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.158114 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-86bfdbbdcd-8dbxj" podStartSLOduration=4.158076176 podStartE2EDuration="4.158076176s" podCreationTimestamp="2025-10-06 13:50:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 
13:51:03.154408367 +0000 UTC m=+751.651726914" watchObservedRunningTime="2025-10-06 13:51:03.158076176 +0000 UTC m=+751.655394723" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.176863 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-64cb69c7df-lgkd6" podStartSLOduration=4.176847125 podStartE2EDuration="4.176847125s" podCreationTimestamp="2025-10-06 13:50:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:51:03.173984742 +0000 UTC m=+751.671303289" watchObservedRunningTime="2025-10-06 13:51:03.176847125 +0000 UTC m=+751.674165652" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.178008 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p58j\" (UniqueName: \"kubernetes.io/projected/e2bebc59-e950-44d3-9b34-e9ae757735ed-kube-api-access-7p58j\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.245418 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.282512 4757 patch_prober.go:28] interesting pod/console-f9d7485db-xbn64 container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.282725 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-xbn64" podUID="0345b748-8161-40b5-bec8-0c36c2d87ea3" containerName="console" probeResult="failure" output="Get \"https://10.217.0.14:8443/health\": dial tcp 10.217.0.14:8443: connect: connection refused" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.565603 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xbn64_0345b748-8161-40b5-bec8-0c36c2d87ea3/console/0.log" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.566026 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.586676 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl"] Oct 06 13:51:03 crc kubenswrapper[4757]: W1006 13:51:03.592922 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2bebc59_e950_44d3_9b34_e9ae757735ed.slice/crio-9b7a36d3ccb7c5bfbc986bd5824ba3c02b32446a537cbaf123a97917eba93201 WatchSource:0}: Error finding container 9b7a36d3ccb7c5bfbc986bd5824ba3c02b32446a537cbaf123a97917eba93201: Status 404 returned error can't find the container with id 9b7a36d3ccb7c5bfbc986bd5824ba3c02b32446a537cbaf123a97917eba93201 Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.749809 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-config\") pod \"0345b748-8161-40b5-bec8-0c36c2d87ea3\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.749853 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-oauth-serving-cert\") pod \"0345b748-8161-40b5-bec8-0c36c2d87ea3\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.749896 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-service-ca\") pod \"0345b748-8161-40b5-bec8-0c36c2d87ea3\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.749941 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-oauth-config\") pod \"0345b748-8161-40b5-bec8-0c36c2d87ea3\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.749968 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-serving-cert\") pod \"0345b748-8161-40b5-bec8-0c36c2d87ea3\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.750023 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jstcl\" (UniqueName: \"kubernetes.io/projected/0345b748-8161-40b5-bec8-0c36c2d87ea3-kube-api-access-jstcl\") pod \"0345b748-8161-40b5-bec8-0c36c2d87ea3\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.750044 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-trusted-ca-bundle\") pod \"0345b748-8161-40b5-bec8-0c36c2d87ea3\" (UID: \"0345b748-8161-40b5-bec8-0c36c2d87ea3\") " Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.751314 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "0345b748-8161-40b5-bec8-0c36c2d87ea3" (UID: "0345b748-8161-40b5-bec8-0c36c2d87ea3"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.751339 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "0345b748-8161-40b5-bec8-0c36c2d87ea3" (UID: "0345b748-8161-40b5-bec8-0c36c2d87ea3"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.751349 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0345b748-8161-40b5-bec8-0c36c2d87ea3" (UID: "0345b748-8161-40b5-bec8-0c36c2d87ea3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.751995 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-config" (OuterVolumeSpecName: "console-config") pod "0345b748-8161-40b5-bec8-0c36c2d87ea3" (UID: "0345b748-8161-40b5-bec8-0c36c2d87ea3"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.757950 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "0345b748-8161-40b5-bec8-0c36c2d87ea3" (UID: "0345b748-8161-40b5-bec8-0c36c2d87ea3"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.757988 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0345b748-8161-40b5-bec8-0c36c2d87ea3-kube-api-access-jstcl" (OuterVolumeSpecName: "kube-api-access-jstcl") pod "0345b748-8161-40b5-bec8-0c36c2d87ea3" (UID: "0345b748-8161-40b5-bec8-0c36c2d87ea3"). InnerVolumeSpecName "kube-api-access-jstcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.758237 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "0345b748-8161-40b5-bec8-0c36c2d87ea3" (UID: "0345b748-8161-40b5-bec8-0c36c2d87ea3"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.852293 4757 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.852327 4757 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.852339 4757 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.852348 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jstcl\" (UniqueName: \"kubernetes.io/projected/0345b748-8161-40b5-bec8-0c36c2d87ea3-kube-api-access-jstcl\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.852357 4757 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.852365 4757 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-console-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:03 crc kubenswrapper[4757]: I1006 13:51:03.852373 4757 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0345b748-8161-40b5-bec8-0c36c2d87ea3-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.147184 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xbn64_0345b748-8161-40b5-bec8-0c36c2d87ea3/console/0.log" Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.147597 4757 generic.go:334] "Generic (PLEG): container finished" podID="0345b748-8161-40b5-bec8-0c36c2d87ea3" containerID="0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554" exitCode=2 Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.147665 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xbn64" event={"ID":"0345b748-8161-40b5-bec8-0c36c2d87ea3","Type":"ContainerDied","Data":"0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554"} Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.147700 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xbn64" event={"ID":"0345b748-8161-40b5-bec8-0c36c2d87ea3","Type":"ContainerDied","Data":"807b2c6022525d0d7a6521e60ee56f1fe8bb13b2d8f7d3e9233850f5e1ccc2d8"} Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.147736 4757 scope.go:117] "RemoveContainer" containerID="0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554" Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.147744 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xbn64" Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.150908 4757 generic.go:334] "Generic (PLEG): container finished" podID="e2bebc59-e950-44d3-9b34-e9ae757735ed" containerID="f4a0375d3e04bd227ef10227d3825c1c3025d395e779c98d3a03650d6e5d4026" exitCode=0 Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.150950 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" event={"ID":"e2bebc59-e950-44d3-9b34-e9ae757735ed","Type":"ContainerDied","Data":"f4a0375d3e04bd227ef10227d3825c1c3025d395e779c98d3a03650d6e5d4026"} Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.150991 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" event={"ID":"e2bebc59-e950-44d3-9b34-e9ae757735ed","Type":"ContainerStarted","Data":"9b7a36d3ccb7c5bfbc986bd5824ba3c02b32446a537cbaf123a97917eba93201"} Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.169031 4757 scope.go:117] "RemoveContainer" containerID="0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554" Oct 06 13:51:04 crc kubenswrapper[4757]: E1006 13:51:04.170977 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554\": container with ID starting with 0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554 not found: ID does not exist" containerID="0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554" Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.171044 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554"} err="failed to get container status \"0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554\": rpc error: code = NotFound desc = could not find container \"0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554\": container with ID starting with 0ba4255813842017f410e1590e4c8826264cf5b3878386c834741f5e9359b554 not found: ID does not exist" Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.223479 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xbn64"] Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.226133 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-xbn64"] Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.360884 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:51:04 crc kubenswrapper[4757]: I1006 13:51:04.360936 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:51:06 crc kubenswrapper[4757]: I1006 13:51:06.169216 4757 generic.go:334] "Generic (PLEG): container finished" podID="e2bebc59-e950-44d3-9b34-e9ae757735ed" 
containerID="115b2ca8d980dde326dd069cf8019ef3e943a61943857af5df8d9668212d829f" exitCode=0 Oct 06 13:51:06 crc kubenswrapper[4757]: I1006 13:51:06.169329 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" event={"ID":"e2bebc59-e950-44d3-9b34-e9ae757735ed","Type":"ContainerDied","Data":"115b2ca8d980dde326dd069cf8019ef3e943a61943857af5df8d9668212d829f"} Oct 06 13:51:06 crc kubenswrapper[4757]: I1006 13:51:06.189743 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0345b748-8161-40b5-bec8-0c36c2d87ea3" path="/var/lib/kubelet/pods/0345b748-8161-40b5-bec8-0c36c2d87ea3/volumes" Oct 06 13:51:07 crc kubenswrapper[4757]: I1006 13:51:07.187150 4757 generic.go:334] "Generic (PLEG): container finished" podID="e2bebc59-e950-44d3-9b34-e9ae757735ed" containerID="55351782fba2aa2d8533f7327db0b57e2409821f0bc76020ae1867854ff39b65" exitCode=0 Oct 06 13:51:07 crc kubenswrapper[4757]: I1006 13:51:07.187568 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" event={"ID":"e2bebc59-e950-44d3-9b34-e9ae757735ed","Type":"ContainerDied","Data":"55351782fba2aa2d8533f7327db0b57e2409821f0bc76020ae1867854ff39b65"} Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.529648 4757 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.584197 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.621642 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-bundle\") pod \"e2bebc59-e950-44d3-9b34-e9ae757735ed\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.621697 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-util\") pod \"e2bebc59-e950-44d3-9b34-e9ae757735ed\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.621739 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p58j\" (UniqueName: \"kubernetes.io/projected/e2bebc59-e950-44d3-9b34-e9ae757735ed-kube-api-access-7p58j\") pod \"e2bebc59-e950-44d3-9b34-e9ae757735ed\" (UID: \"e2bebc59-e950-44d3-9b34-e9ae757735ed\") " Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.622755 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-bundle" (OuterVolumeSpecName: "bundle") pod "e2bebc59-e950-44d3-9b34-e9ae757735ed" (UID: "e2bebc59-e950-44d3-9b34-e9ae757735ed"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.627892 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2bebc59-e950-44d3-9b34-e9ae757735ed-kube-api-access-7p58j" (OuterVolumeSpecName: "kube-api-access-7p58j") pod "e2bebc59-e950-44d3-9b34-e9ae757735ed" (UID: "e2bebc59-e950-44d3-9b34-e9ae757735ed"). InnerVolumeSpecName "kube-api-access-7p58j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.648118 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-util" (OuterVolumeSpecName: "util") pod "e2bebc59-e950-44d3-9b34-e9ae757735ed" (UID: "e2bebc59-e950-44d3-9b34-e9ae757735ed"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.723272 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p58j\" (UniqueName: \"kubernetes.io/projected/e2bebc59-e950-44d3-9b34-e9ae757735ed-kube-api-access-7p58j\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.723327 4757 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.723339 4757 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2bebc59-e950-44d3-9b34-e9ae757735ed-util\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.765300 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qzrbg"] Oct 06 13:51:08 crc kubenswrapper[4757]: E1006 13:51:08.765587 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2bebc59-e950-44d3-9b34-e9ae757735ed" containerName="pull" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.765603 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2bebc59-e950-44d3-9b34-e9ae757735ed" containerName="pull" Oct 06 13:51:08 crc kubenswrapper[4757]: E1006 13:51:08.765620 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2bebc59-e950-44d3-9b34-e9ae757735ed" containerName="util" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.765627 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2bebc59-e950-44d3-9b34-e9ae757735ed" containerName="util" Oct 06 13:51:08 crc kubenswrapper[4757]: E1006 13:51:08.765637 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2bebc59-e950-44d3-9b34-e9ae757735ed" containerName="extract" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.765644 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2bebc59-e950-44d3-9b34-e9ae757735ed" containerName="extract" Oct 06 13:51:08 crc kubenswrapper[4757]: E1006 13:51:08.765655 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0345b748-8161-40b5-bec8-0c36c2d87ea3" containerName="console" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.765660 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0345b748-8161-40b5-bec8-0c36c2d87ea3" containerName="console" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.765767 4757 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0345b748-8161-40b5-bec8-0c36c2d87ea3" containerName="console" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.765791 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2bebc59-e950-44d3-9b34-e9ae757735ed" containerName="extract" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.766582 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.782138 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qzrbg"] Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.824729 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-catalog-content\") pod \"redhat-operators-qzrbg\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.824898 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h8w2\" (UniqueName: \"kubernetes.io/projected/3a0188f2-1c86-444b-93c9-73509237da1a-kube-api-access-7h8w2\") pod \"redhat-operators-qzrbg\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.825110 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-utilities\") pod \"redhat-operators-qzrbg\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.926451 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-catalog-content\") pod \"redhat-operators-qzrbg\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.925876 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-catalog-content\") pod \"redhat-operators-qzrbg\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.926710 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h8w2\" (UniqueName: \"kubernetes.io/projected/3a0188f2-1c86-444b-93c9-73509237da1a-kube-api-access-7h8w2\") pod \"redhat-operators-qzrbg\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.927493 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-utilities\") pod \"redhat-operators-qzrbg\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.927800 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-utilities\") pod \"redhat-operators-qzrbg\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:08 crc kubenswrapper[4757]: I1006 13:51:08.945889 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h8w2\" (UniqueName: \"kubernetes.io/projected/3a0188f2-1c86-444b-93c9-73509237da1a-kube-api-access-7h8w2\") pod \"redhat-operators-qzrbg\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:09 crc kubenswrapper[4757]: I1006 13:51:09.081967 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:09 crc kubenswrapper[4757]: I1006 13:51:09.211375 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" event={"ID":"e2bebc59-e950-44d3-9b34-e9ae757735ed","Type":"ContainerDied","Data":"9b7a36d3ccb7c5bfbc986bd5824ba3c02b32446a537cbaf123a97917eba93201"} Oct 06 13:51:09 crc kubenswrapper[4757]: I1006 13:51:09.211412 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b7a36d3ccb7c5bfbc986bd5824ba3c02b32446a537cbaf123a97917eba93201" Oct 06 13:51:09 crc kubenswrapper[4757]: I1006 13:51:09.211485 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl" Oct 06 13:51:09 crc kubenswrapper[4757]: I1006 13:51:09.478881 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qzrbg"] Oct 06 13:51:09 crc kubenswrapper[4757]: W1006 13:51:09.485344 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a0188f2_1c86_444b_93c9_73509237da1a.slice/crio-35711b327f39cc358541bf94fb3f5720869bbec82b12e6b029f5058f48c2c825 WatchSource:0}: Error finding container 35711b327f39cc358541bf94fb3f5720869bbec82b12e6b029f5058f48c2c825: Status 404 returned error can't find the container with id 35711b327f39cc358541bf94fb3f5720869bbec82b12e6b029f5058f48c2c825 Oct 06 13:51:10 crc kubenswrapper[4757]: I1006 13:51:10.216821 4757 generic.go:334] "Generic (PLEG): container finished" podID="3a0188f2-1c86-444b-93c9-73509237da1a" containerID="7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031" exitCode=0 Oct 06 13:51:10 crc kubenswrapper[4757]: I1006 13:51:10.216917 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzrbg" event={"ID":"3a0188f2-1c86-444b-93c9-73509237da1a","Type":"ContainerDied","Data":"7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031"} Oct 06 13:51:10 crc kubenswrapper[4757]: I1006 13:51:10.217199 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzrbg" event={"ID":"3a0188f2-1c86-444b-93c9-73509237da1a","Type":"ContainerStarted","Data":"35711b327f39cc358541bf94fb3f5720869bbec82b12e6b029f5058f48c2c825"} Oct 06 13:51:12 crc kubenswrapper[4757]: I1006 13:51:12.230872 4757 generic.go:334] "Generic (PLEG): container finished" podID="3a0188f2-1c86-444b-93c9-73509237da1a" containerID="ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d" exitCode=0 Oct 06 13:51:12 crc kubenswrapper[4757]: I1006 
13:51:12.231030 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzrbg" event={"ID":"3a0188f2-1c86-444b-93c9-73509237da1a","Type":"ContainerDied","Data":"ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d"} Oct 06 13:51:13 crc kubenswrapper[4757]: I1006 13:51:13.238786 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzrbg" event={"ID":"3a0188f2-1c86-444b-93c9-73509237da1a","Type":"ContainerStarted","Data":"46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95"} Oct 06 13:51:13 crc kubenswrapper[4757]: I1006 13:51:13.261617 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qzrbg" podStartSLOduration=2.666417506 podStartE2EDuration="5.261601023s" podCreationTimestamp="2025-10-06 13:51:08 +0000 UTC" firstStartedPulling="2025-10-06 13:51:10.21816509 +0000 UTC m=+758.715483627" lastFinishedPulling="2025-10-06 13:51:12.813348607 +0000 UTC m=+761.310667144" observedRunningTime="2025-10-06 13:51:13.256712154 +0000 UTC m=+761.754030691" watchObservedRunningTime="2025-10-06 13:51:13.261601023 +0000 UTC m=+761.758919560" Oct 06 13:51:19 crc kubenswrapper[4757]: I1006 13:51:19.082257 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:19 crc kubenswrapper[4757]: I1006 13:51:19.082767 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:19 crc kubenswrapper[4757]: I1006 13:51:19.132020 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:19 crc kubenswrapper[4757]: I1006 13:51:19.309508 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.609544 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw"] Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.610800 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.613579 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.613856 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.614195 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.614391 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.623837 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-t2sp5" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.634166 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw"] Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.670784 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49xhz\" (UniqueName: \"kubernetes.io/projected/146e21cb-4669-41fc-9356-be297ab190fc-kube-api-access-49xhz\") pod \"metallb-operator-controller-manager-77db4df477-bh7dw\" (UID: \"146e21cb-4669-41fc-9356-be297ab190fc\") " pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.670856 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/146e21cb-4669-41fc-9356-be297ab190fc-webhook-cert\") pod \"metallb-operator-controller-manager-77db4df477-bh7dw\" (UID: \"146e21cb-4669-41fc-9356-be297ab190fc\") " pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.670884 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/146e21cb-4669-41fc-9356-be297ab190fc-apiservice-cert\") pod \"metallb-operator-controller-manager-77db4df477-bh7dw\" (UID: \"146e21cb-4669-41fc-9356-be297ab190fc\") " pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.772371 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49xhz\" (UniqueName: \"kubernetes.io/projected/146e21cb-4669-41fc-9356-be297ab190fc-kube-api-access-49xhz\") pod \"metallb-operator-controller-manager-77db4df477-bh7dw\" (UID: \"146e21cb-4669-41fc-9356-be297ab190fc\") " pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.772434 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/146e21cb-4669-41fc-9356-be297ab190fc-webhook-cert\") pod \"metallb-operator-controller-manager-77db4df477-bh7dw\" (UID: \"146e21cb-4669-41fc-9356-be297ab190fc\") " pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.772455 4757 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/146e21cb-4669-41fc-9356-be297ab190fc-apiservice-cert\") pod \"metallb-operator-controller-manager-77db4df477-bh7dw\" (UID: \"146e21cb-4669-41fc-9356-be297ab190fc\") " pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.778643 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/146e21cb-4669-41fc-9356-be297ab190fc-apiservice-cert\") pod \"metallb-operator-controller-manager-77db4df477-bh7dw\" (UID: \"146e21cb-4669-41fc-9356-be297ab190fc\") " pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.778722 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/146e21cb-4669-41fc-9356-be297ab190fc-webhook-cert\") pod \"metallb-operator-controller-manager-77db4df477-bh7dw\" (UID: \"146e21cb-4669-41fc-9356-be297ab190fc\") " pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.792858 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49xhz\" (UniqueName: \"kubernetes.io/projected/146e21cb-4669-41fc-9356-be297ab190fc-kube-api-access-49xhz\") pod \"metallb-operator-controller-manager-77db4df477-bh7dw\" (UID: \"146e21cb-4669-41fc-9356-be297ab190fc\") " pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.870931 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-65787fbbf-zc897"] Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.872335 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.873736 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75252c24-aec1-4169-9b9b-c7041274bb4d-webhook-cert\") pod \"metallb-operator-webhook-server-65787fbbf-zc897\" (UID: \"75252c24-aec1-4169-9b9b-c7041274bb4d\") " pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.873806 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgp7n\" (UniqueName: \"kubernetes.io/projected/75252c24-aec1-4169-9b9b-c7041274bb4d-kube-api-access-fgp7n\") pod \"metallb-operator-webhook-server-65787fbbf-zc897\" (UID: \"75252c24-aec1-4169-9b9b-c7041274bb4d\") " pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.873869 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75252c24-aec1-4169-9b9b-c7041274bb4d-apiservice-cert\") pod \"metallb-operator-webhook-server-65787fbbf-zc897\" (UID: \"75252c24-aec1-4169-9b9b-c7041274bb4d\") " pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.881526 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.881854 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.887635 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-qjzvm" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.897550 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-65787fbbf-zc897"] Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.931489 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.980791 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75252c24-aec1-4169-9b9b-c7041274bb4d-apiservice-cert\") pod \"metallb-operator-webhook-server-65787fbbf-zc897\" (UID: \"75252c24-aec1-4169-9b9b-c7041274bb4d\") " pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.981291 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75252c24-aec1-4169-9b9b-c7041274bb4d-webhook-cert\") pod \"metallb-operator-webhook-server-65787fbbf-zc897\" (UID: \"75252c24-aec1-4169-9b9b-c7041274bb4d\") " pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.981337 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgp7n\" (UniqueName: \"kubernetes.io/projected/75252c24-aec1-4169-9b9b-c7041274bb4d-kube-api-access-fgp7n\") pod \"metallb-operator-webhook-server-65787fbbf-zc897\" (UID: \"75252c24-aec1-4169-9b9b-c7041274bb4d\") " pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.986538 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75252c24-aec1-4169-9b9b-c7041274bb4d-webhook-cert\") pod \"metallb-operator-webhook-server-65787fbbf-zc897\" (UID: \"75252c24-aec1-4169-9b9b-c7041274bb4d\") " pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:20 crc kubenswrapper[4757]: I1006 13:51:20.987426 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75252c24-aec1-4169-9b9b-c7041274bb4d-apiservice-cert\") pod \"metallb-operator-webhook-server-65787fbbf-zc897\" (UID: \"75252c24-aec1-4169-9b9b-c7041274bb4d\") " pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:21 crc kubenswrapper[4757]: I1006 13:51:21.002279 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgp7n\" (UniqueName: \"kubernetes.io/projected/75252c24-aec1-4169-9b9b-c7041274bb4d-kube-api-access-fgp7n\") pod \"metallb-operator-webhook-server-65787fbbf-zc897\" (UID: \"75252c24-aec1-4169-9b9b-c7041274bb4d\") " pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:21 crc kubenswrapper[4757]: I1006 13:51:21.196737 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:21 crc kubenswrapper[4757]: I1006 13:51:21.500589 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw"] Oct 06 13:51:21 crc kubenswrapper[4757]: W1006 13:51:21.512631 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod146e21cb_4669_41fc_9356_be297ab190fc.slice/crio-eae4a3fc5cc6312407c13bdd3dfb5197c85d380833c8dcc45408610e01de9d57 WatchSource:0}: Error finding container eae4a3fc5cc6312407c13bdd3dfb5197c85d380833c8dcc45408610e01de9d57: Status 404 returned error can't find the container with id eae4a3fc5cc6312407c13bdd3dfb5197c85d380833c8dcc45408610e01de9d57 Oct 06 13:51:21 crc kubenswrapper[4757]: I1006 13:51:21.554110 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qzrbg"] Oct 06 13:51:21 crc kubenswrapper[4757]: I1006 13:51:21.554343 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qzrbg" podUID="3a0188f2-1c86-444b-93c9-73509237da1a" containerName="registry-server" containerID="cri-o://46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95" gracePeriod=2 Oct 06 13:51:21 crc kubenswrapper[4757]: I1006 13:51:21.668173 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-65787fbbf-zc897"] Oct 06 13:51:21 crc kubenswrapper[4757]: W1006 13:51:21.694817 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75252c24_aec1_4169_9b9b_c7041274bb4d.slice/crio-f115f32b3dc6f96e3b3df47635098ebca80dc890994d3a6cb04782ce42ed1886 WatchSource:0}: Error finding container f115f32b3dc6f96e3b3df47635098ebca80dc890994d3a6cb04782ce42ed1886: Status 404 returned error can't find the container with id f115f32b3dc6f96e3b3df47635098ebca80dc890994d3a6cb04782ce42ed1886 Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.025339 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.101127 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-catalog-content\") pod \"3a0188f2-1c86-444b-93c9-73509237da1a\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.101259 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-utilities\") pod \"3a0188f2-1c86-444b-93c9-73509237da1a\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.103656 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-utilities" (OuterVolumeSpecName: "utilities") pod "3a0188f2-1c86-444b-93c9-73509237da1a" (UID: "3a0188f2-1c86-444b-93c9-73509237da1a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.103742 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7h8w2\" (UniqueName: \"kubernetes.io/projected/3a0188f2-1c86-444b-93c9-73509237da1a-kube-api-access-7h8w2\") pod \"3a0188f2-1c86-444b-93c9-73509237da1a\" (UID: \"3a0188f2-1c86-444b-93c9-73509237da1a\") " Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.104057 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.108717 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a0188f2-1c86-444b-93c9-73509237da1a-kube-api-access-7h8w2" (OuterVolumeSpecName: "kube-api-access-7h8w2") pod "3a0188f2-1c86-444b-93c9-73509237da1a" (UID: "3a0188f2-1c86-444b-93c9-73509237da1a"). InnerVolumeSpecName "kube-api-access-7h8w2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.185177 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a0188f2-1c86-444b-93c9-73509237da1a" (UID: "3a0188f2-1c86-444b-93c9-73509237da1a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.205771 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7h8w2\" (UniqueName: \"kubernetes.io/projected/3a0188f2-1c86-444b-93c9-73509237da1a-kube-api-access-7h8w2\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.205827 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a0188f2-1c86-444b-93c9-73509237da1a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.290910 4757 generic.go:334] "Generic (PLEG): container finished" podID="3a0188f2-1c86-444b-93c9-73509237da1a" containerID="46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95" exitCode=0 Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.290966 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzrbg" event={"ID":"3a0188f2-1c86-444b-93c9-73509237da1a","Type":"ContainerDied","Data":"46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95"} Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.291009 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qzrbg" event={"ID":"3a0188f2-1c86-444b-93c9-73509237da1a","Type":"ContainerDied","Data":"35711b327f39cc358541bf94fb3f5720869bbec82b12e6b029f5058f48c2c825"} Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.291017 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qzrbg" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.291038 4757 scope.go:117] "RemoveContainer" containerID="46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.295810 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" event={"ID":"146e21cb-4669-41fc-9356-be297ab190fc","Type":"ContainerStarted","Data":"eae4a3fc5cc6312407c13bdd3dfb5197c85d380833c8dcc45408610e01de9d57"} Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.297791 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" event={"ID":"75252c24-aec1-4169-9b9b-c7041274bb4d","Type":"ContainerStarted","Data":"f115f32b3dc6f96e3b3df47635098ebca80dc890994d3a6cb04782ce42ed1886"} Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.313364 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qzrbg"] Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.313791 4757 scope.go:117] "RemoveContainer" containerID="ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.316009 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qzrbg"] Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.332070 4757 scope.go:117] "RemoveContainer" containerID="7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.355238 4757 scope.go:117] "RemoveContainer" containerID="46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95" Oct 06 13:51:22 crc kubenswrapper[4757]: E1006 13:51:22.355742 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95\": container with ID starting with 46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95 not found: ID does not exist" containerID="46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.355789 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95"} err="failed to get container status \"46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95\": rpc error: code = NotFound desc = could not find container \"46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95\": container with ID starting with 46a5077b4755048a0e58c62fcf40ce5fa31b711c532cf95d84e6c91332265f95 not found: ID does not exist" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.355819 4757 scope.go:117] "RemoveContainer" containerID="ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d" Oct 06 13:51:22 crc kubenswrapper[4757]: E1006 13:51:22.356171 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d\": container with ID starting with ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d not found: ID does not exist" containerID="ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d" Oct 06 13:51:22 crc 
kubenswrapper[4757]: I1006 13:51:22.356281 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d"} err="failed to get container status \"ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d\": rpc error: code = NotFound desc = could not find container \"ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d\": container with ID starting with ce527799a3e1d40ddcc3bab3ea2ddeedccb6df670e3c02844ffc8d121223060d not found: ID does not exist" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.356384 4757 scope.go:117] "RemoveContainer" containerID="7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031" Oct 06 13:51:22 crc kubenswrapper[4757]: E1006 13:51:22.356730 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031\": container with ID starting with 7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031 not found: ID does not exist" containerID="7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031" Oct 06 13:51:22 crc kubenswrapper[4757]: I1006 13:51:22.356755 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031"} err="failed to get container status \"7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031\": rpc error: code = NotFound desc = could not find container \"7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031\": container with ID starting with 7323779d846f1552fb5fa405af69a05af79f47179faa5c104eef0d0e35a84031 not found: ID does not exist" Oct 06 13:51:24 crc kubenswrapper[4757]: I1006 13:51:24.188788 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a0188f2-1c86-444b-93c9-73509237da1a" path="/var/lib/kubelet/pods/3a0188f2-1c86-444b-93c9-73509237da1a/volumes" Oct 06 13:51:27 crc kubenswrapper[4757]: I1006 13:51:27.342316 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" event={"ID":"146e21cb-4669-41fc-9356-be297ab190fc","Type":"ContainerStarted","Data":"81f67e00de85c9b702bad893a0ad7dee3f65ba6fed2ae4f0b53f30830f21d628"} Oct 06 13:51:27 crc kubenswrapper[4757]: I1006 13:51:27.343802 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:51:27 crc kubenswrapper[4757]: I1006 13:51:27.344494 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" event={"ID":"75252c24-aec1-4169-9b9b-c7041274bb4d","Type":"ContainerStarted","Data":"b729b7d6a3112b2451fad7d25ecd5fc827e234b0aba2c27d802cc83750828a5c"} Oct 06 13:51:27 crc kubenswrapper[4757]: I1006 13:51:27.344711 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" Oct 06 13:51:27 crc kubenswrapper[4757]: I1006 13:51:27.367901 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" podStartSLOduration=2.321687866 podStartE2EDuration="7.367874938s" podCreationTimestamp="2025-10-06 13:51:20 +0000 UTC" firstStartedPulling="2025-10-06 13:51:21.516763226 +0000 UTC 
Oct 06 13:51:27 crc kubenswrapper[4757]: I1006 13:51:27.390769 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897" podStartSLOduration=2.5151234970000003 podStartE2EDuration="7.390750611s" podCreationTimestamp="2025-10-06 13:51:20 +0000 UTC" firstStartedPulling="2025-10-06 13:51:21.708236453 +0000 UTC m=+770.205554990" lastFinishedPulling="2025-10-06 13:51:26.583863567 +0000 UTC m=+775.081182104" observedRunningTime="2025-10-06 13:51:27.389168619 +0000 UTC m=+775.886487226" watchObservedRunningTime="2025-10-06 13:51:27.390750611 +0000 UTC m=+775.888069158"
Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.160687 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xl6q9"]
Oct 06 13:51:31 crc kubenswrapper[4757]: E1006 13:51:31.161319 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a0188f2-1c86-444b-93c9-73509237da1a" containerName="extract-utilities"
Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.161341 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a0188f2-1c86-444b-93c9-73509237da1a" containerName="extract-utilities"
Oct 06 13:51:31 crc kubenswrapper[4757]: E1006 13:51:31.161369 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a0188f2-1c86-444b-93c9-73509237da1a" containerName="extract-content"
Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.161381 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a0188f2-1c86-444b-93c9-73509237da1a" containerName="extract-content"
Oct 06 13:51:31 crc kubenswrapper[4757]: E1006 13:51:31.161398 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a0188f2-1c86-444b-93c9-73509237da1a" containerName="registry-server"
Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.161412 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a0188f2-1c86-444b-93c9-73509237da1a" containerName="registry-server"
Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.161597 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a0188f2-1c86-444b-93c9-73509237da1a" containerName="registry-server"
Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.162917 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xl6q9"
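Editor's note: the two "Observed pod startup duration" entries above encode a relationship worth making explicit: podStartSLOduration = podStartE2EDuration - (lastFinishedPulling - firstStartedPulling), i.e. the SLO metric excludes image-pull time. For the controller-manager pod, 7.367874938s - 5.046187072s = 2.321687866s, exactly as logged (the webhook-server entry checks out the same way). A quick verification in Go using the timestamps copied from those entries:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		// Layout matches Go's default time.Time String() output,
		// which is what the kubelet prints in these fields.
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-06 13:51:20 +0000 UTC")   // podCreationTimestamp
	firstPull := parse("2025-10-06 13:51:21.516763226 +0000 UTC")
	lastPull := parse("2025-10-06 13:51:26.562950298 +0000 UTC")
	running := parse("2025-10-06 13:51:27.367874938 +0000 UTC") // watchObservedRunningTime

	e2e := running.Sub(created)          // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // pull window excluded
	fmt.Println(e2e, slo)                // 7.367874938s 2.321687866s
}
```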
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.181741 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xl6q9"] Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.332595 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-catalog-content\") pod \"redhat-marketplace-xl6q9\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") " pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.332686 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56j87\" (UniqueName: \"kubernetes.io/projected/4f4e4783-e237-4435-854c-798de6f16672-kube-api-access-56j87\") pod \"redhat-marketplace-xl6q9\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") " pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.332818 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-utilities\") pod \"redhat-marketplace-xl6q9\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") " pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.434379 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-catalog-content\") pod \"redhat-marketplace-xl6q9\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") " pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.434476 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56j87\" (UniqueName: \"kubernetes.io/projected/4f4e4783-e237-4435-854c-798de6f16672-kube-api-access-56j87\") pod \"redhat-marketplace-xl6q9\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") " pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.434516 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-utilities\") pod \"redhat-marketplace-xl6q9\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") " pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.435131 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-utilities\") pod \"redhat-marketplace-xl6q9\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") " pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.435243 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-catalog-content\") pod \"redhat-marketplace-xl6q9\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") " pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.460198 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-56j87\" (UniqueName: \"kubernetes.io/projected/4f4e4783-e237-4435-854c-798de6f16672-kube-api-access-56j87\") pod \"redhat-marketplace-xl6q9\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") " pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.487047 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:31 crc kubenswrapper[4757]: I1006 13:51:31.936228 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xl6q9"] Oct 06 13:51:32 crc kubenswrapper[4757]: E1006 13:51:32.301712 4757 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f4e4783_e237_4435_854c_798de6f16672.slice/crio-b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00.scope\": RecentStats: unable to find data in memory cache]" Oct 06 13:51:32 crc kubenswrapper[4757]: I1006 13:51:32.371062 4757 generic.go:334] "Generic (PLEG): container finished" podID="4f4e4783-e237-4435-854c-798de6f16672" containerID="b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00" exitCode=0 Oct 06 13:51:32 crc kubenswrapper[4757]: I1006 13:51:32.371132 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xl6q9" event={"ID":"4f4e4783-e237-4435-854c-798de6f16672","Type":"ContainerDied","Data":"b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00"} Oct 06 13:51:32 crc kubenswrapper[4757]: I1006 13:51:32.371189 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xl6q9" event={"ID":"4f4e4783-e237-4435-854c-798de6f16672","Type":"ContainerStarted","Data":"10f4042b960688cf2f580532133ed6cdc662b33b82c49c4273ec4aa8d3c36f4a"} Oct 06 13:51:33 crc kubenswrapper[4757]: I1006 13:51:33.379197 4757 generic.go:334] "Generic (PLEG): container finished" podID="4f4e4783-e237-4435-854c-798de6f16672" containerID="d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c" exitCode=0 Oct 06 13:51:33 crc kubenswrapper[4757]: I1006 13:51:33.380119 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xl6q9" event={"ID":"4f4e4783-e237-4435-854c-798de6f16672","Type":"ContainerDied","Data":"d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c"} Oct 06 13:51:34 crc kubenswrapper[4757]: I1006 13:51:34.361495 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:51:34 crc kubenswrapper[4757]: I1006 13:51:34.361837 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:51:34 crc kubenswrapper[4757]: I1006 13:51:34.361890 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:51:34 crc kubenswrapper[4757]: I1006 13:51:34.362621 
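Editor's note: the last four entries above are the standard liveness pipeline: the prober's HTTP GET to 127.0.0.1:8798/health is refused, the sync loop records status "unhealthy", and kuberuntime schedules a kill so the restart policy can recreate the container (the actual kill in the next entry uses gracePeriod=600 from the pod spec). A schematic, stdlib-only sketch; the URL is taken from the log, while the failureThreshold of 3 is the Kubernetes default and an assumption about this particular pod:

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs the same check the log shows failing:
// GET http://127.0.0.1:8798/health, with a short timeout.
func probeOnce(url string) bool {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return false // e.g. "connect: connection refused"
	}
	defer resp.Body.Close()
	return resp.StatusCode >= 200 && resp.StatusCode < 400
}

func main() {
	const failureThreshold = 3 // Kubernetes default; assumed here
	failures := 0
	for i := 0; i < failureThreshold; i++ {
		if !probeOnce("http://127.0.0.1:8798/health") {
			failures++
		}
	}
	if failures >= failureThreshold {
		// The real kubelet now asks the runtime to stop the container
		// with the pod's terminationGracePeriodSeconds (600 in this log).
		fmt.Println("liveness unhealthy: killing container, gracePeriod=600")
	}
}
```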
Oct 06 13:51:34 crc kubenswrapper[4757]: I1006 13:51:34.362679 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://6542a2ffbfd366a895d5ce83b4ab3a725c3018b8a608b5d8f6bff2292762cd35" gracePeriod=600
Oct 06 13:51:34 crc kubenswrapper[4757]: I1006 13:51:34.387371 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xl6q9" event={"ID":"4f4e4783-e237-4435-854c-798de6f16672","Type":"ContainerStarted","Data":"08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada"}
Oct 06 13:51:34 crc kubenswrapper[4757]: I1006 13:51:34.407650 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xl6q9" podStartSLOduration=1.9933390260000001 podStartE2EDuration="3.407628193s" podCreationTimestamp="2025-10-06 13:51:31 +0000 UTC" firstStartedPulling="2025-10-06 13:51:32.373324335 +0000 UTC m=+780.870642862" lastFinishedPulling="2025-10-06 13:51:33.787613482 +0000 UTC m=+782.284932029" observedRunningTime="2025-10-06 13:51:34.407040154 +0000 UTC m=+782.904358701" watchObservedRunningTime="2025-10-06 13:51:34.407628193 +0000 UTC m=+782.904946730"
Oct 06 13:51:35 crc kubenswrapper[4757]: I1006 13:51:35.396213 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="6542a2ffbfd366a895d5ce83b4ab3a725c3018b8a608b5d8f6bff2292762cd35" exitCode=0
Oct 06 13:51:35 crc kubenswrapper[4757]: I1006 13:51:35.396288 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"6542a2ffbfd366a895d5ce83b4ab3a725c3018b8a608b5d8f6bff2292762cd35"}
Oct 06 13:51:35 crc kubenswrapper[4757]: I1006 13:51:35.397076 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"c10bf450268206bb6caa070d8d9e8b690b70b76277c3af98f50337e231aead63"}
Oct 06 13:51:35 crc kubenswrapper[4757]: I1006 13:51:35.397128 4757 scope.go:117] "RemoveContainer" containerID="c1caaff02f46fc52972611163537c1807a4d146f35cf5ae6f7d131326516fd78"
Oct 06 13:51:41 crc kubenswrapper[4757]: I1006 13:51:41.204383 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-65787fbbf-zc897"
Oct 06 13:51:41 crc kubenswrapper[4757]: I1006 13:51:41.488131 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xl6q9"
Oct 06 13:51:41 crc kubenswrapper[4757]: I1006 13:51:41.488653 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xl6q9"
Oct 06 13:51:41 crc kubenswrapper[4757]: I1006 13:51:41.550381 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xl6q9"
Oct 06 13:51:42 crc kubenswrapper[4757]: I1006 13:51:42.484788 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xl6q9"
Oct 06 13:51:42 crc kubenswrapper[4757]: I1006 13:51:42.530573 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xl6q9"]
Oct 06 13:51:44 crc kubenswrapper[4757]: I1006 13:51:44.457028 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xl6q9" podUID="4f4e4783-e237-4435-854c-798de6f16672" containerName="registry-server" containerID="cri-o://08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada" gracePeriod=2
Oct 06 13:51:44 crc kubenswrapper[4757]: I1006 13:51:44.862723 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xl6q9"
Oct 06 13:51:44 crc kubenswrapper[4757]: I1006 13:51:44.911645 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-utilities\") pod \"4f4e4783-e237-4435-854c-798de6f16672\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") "
Oct 06 13:51:44 crc kubenswrapper[4757]: I1006 13:51:44.911741 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56j87\" (UniqueName: \"kubernetes.io/projected/4f4e4783-e237-4435-854c-798de6f16672-kube-api-access-56j87\") pod \"4f4e4783-e237-4435-854c-798de6f16672\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") "
Oct 06 13:51:44 crc kubenswrapper[4757]: I1006 13:51:44.911803 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-catalog-content\") pod \"4f4e4783-e237-4435-854c-798de6f16672\" (UID: \"4f4e4783-e237-4435-854c-798de6f16672\") "
Oct 06 13:51:44 crc kubenswrapper[4757]: I1006 13:51:44.919564 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f4e4783-e237-4435-854c-798de6f16672-kube-api-access-56j87" (OuterVolumeSpecName: "kube-api-access-56j87") pod "4f4e4783-e237-4435-854c-798de6f16672" (UID: "4f4e4783-e237-4435-854c-798de6f16672"). InnerVolumeSpecName "kube-api-access-56j87". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:51:44 crc kubenswrapper[4757]: I1006 13:51:44.924539 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-utilities" (OuterVolumeSpecName: "utilities") pod "4f4e4783-e237-4435-854c-798de6f16672" (UID: "4f4e4783-e237-4435-854c-798de6f16672"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:51:44 crc kubenswrapper[4757]: I1006 13:51:44.929986 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4f4e4783-e237-4435-854c-798de6f16672" (UID: "4f4e4783-e237-4435-854c-798de6f16672"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.012927 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56j87\" (UniqueName: \"kubernetes.io/projected/4f4e4783-e237-4435-854c-798de6f16672-kube-api-access-56j87\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.012968 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.012981 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f4e4783-e237-4435-854c-798de6f16672-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.463205 4757 generic.go:334] "Generic (PLEG): container finished" podID="4f4e4783-e237-4435-854c-798de6f16672" containerID="08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada" exitCode=0 Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.463262 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xl6q9" event={"ID":"4f4e4783-e237-4435-854c-798de6f16672","Type":"ContainerDied","Data":"08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada"} Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.463304 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xl6q9" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.463320 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xl6q9" event={"ID":"4f4e4783-e237-4435-854c-798de6f16672","Type":"ContainerDied","Data":"10f4042b960688cf2f580532133ed6cdc662b33b82c49c4273ec4aa8d3c36f4a"} Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.463352 4757 scope.go:117] "RemoveContainer" containerID="08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.480634 4757 scope.go:117] "RemoveContainer" containerID="d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.498208 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xl6q9"] Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.501218 4757 scope.go:117] "RemoveContainer" containerID="b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.504451 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xl6q9"] Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.538857 4757 scope.go:117] "RemoveContainer" containerID="08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada" Oct 06 13:51:45 crc kubenswrapper[4757]: E1006 13:51:45.539723 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada\": container with ID starting with 08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada not found: ID does not exist" containerID="08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.539782 4757 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada"} err="failed to get container status \"08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada\": rpc error: code = NotFound desc = could not find container \"08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada\": container with ID starting with 08aa33cb545276a7258fd736ff219f36fdb42a0b85a4b48bba913763b156cada not found: ID does not exist" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.539815 4757 scope.go:117] "RemoveContainer" containerID="d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c" Oct 06 13:51:45 crc kubenswrapper[4757]: E1006 13:51:45.541698 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c\": container with ID starting with d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c not found: ID does not exist" containerID="d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.541758 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c"} err="failed to get container status \"d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c\": rpc error: code = NotFound desc = could not find container \"d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c\": container with ID starting with d27ff9a4385528b6ced0a29ef1e3cfd6694c26c50cd245e1e6e534394383508c not found: ID does not exist" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.541790 4757 scope.go:117] "RemoveContainer" containerID="b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00" Oct 06 13:51:45 crc kubenswrapper[4757]: E1006 13:51:45.542404 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00\": container with ID starting with b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00 not found: ID does not exist" containerID="b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00" Oct 06 13:51:45 crc kubenswrapper[4757]: I1006 13:51:45.542455 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00"} err="failed to get container status \"b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00\": rpc error: code = NotFound desc = could not find container \"b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00\": container with ID starting with b8244e2d002b2195f6979072f8c1d2c0cb92ef6ea7fd81f52950cea997ff9a00 not found: ID does not exist" Oct 06 13:51:46 crc kubenswrapper[4757]: I1006 13:51:46.194892 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f4e4783-e237-4435-854c-798de6f16672" path="/var/lib/kubelet/pods/4f4e4783-e237-4435-854c-798de6f16672/volumes" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.406340 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9qgx5"] Oct 06 13:51:56 crc kubenswrapper[4757]: E1006 13:51:56.407356 4757 cpu_manager.go:410] "RemoveStaleState: removing container" 
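Editor's note: the cpu_manager / state_mem / memory_manager triplets here and at 13:51:31 repeat once per container of the removed pod: on the next pod ADD, the resource managers purge per-container assignments recorded for pods that no longer exist. A minimal sketch of that bookkeeping, assuming a flat map keyed by pod UID and container name (the real managers track CPUSet and NUMA affinity state, not plain strings):

```go
package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops assignments for any pod not in activePods,
// mirroring the "RemoveStaleState" / "Deleted CPUSet assignment" pairs above.
func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.container)
			delete(assignments, k)
		}
	}
}

func main() {
	assignments := map[key]string{
		{"4f4e4783-e237-4435-854c-798de6f16672", "extract-utilities"}: "0-3",
		{"4f4e4783-e237-4435-854c-798de6f16672", "extract-content"}:   "0-3",
		{"4f4e4783-e237-4435-854c-798de6f16672", "registry-server"}:   "0-3",
	}
	// The deleted pod is absent from the active set, so all three go.
	removeStaleState(assignments, map[string]bool{})
}
```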
podUID="4f4e4783-e237-4435-854c-798de6f16672" containerName="extract-utilities" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.407378 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f4e4783-e237-4435-854c-798de6f16672" containerName="extract-utilities" Oct 06 13:51:56 crc kubenswrapper[4757]: E1006 13:51:56.407396 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f4e4783-e237-4435-854c-798de6f16672" containerName="extract-content" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.407407 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f4e4783-e237-4435-854c-798de6f16672" containerName="extract-content" Oct 06 13:51:56 crc kubenswrapper[4757]: E1006 13:51:56.407442 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f4e4783-e237-4435-854c-798de6f16672" containerName="registry-server" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.407453 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f4e4783-e237-4435-854c-798de6f16672" containerName="registry-server" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.407635 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f4e4783-e237-4435-854c-798de6f16672" containerName="registry-server" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.409135 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.427240 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9qgx5"] Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.476875 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-catalog-content\") pod \"community-operators-9qgx5\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.476924 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-utilities\") pod \"community-operators-9qgx5\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.477001 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbdp8\" (UniqueName: \"kubernetes.io/projected/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-kube-api-access-fbdp8\") pod \"community-operators-9qgx5\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.578220 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-catalog-content\") pod \"community-operators-9qgx5\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.578285 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-utilities\") pod 
\"community-operators-9qgx5\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.578362 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbdp8\" (UniqueName: \"kubernetes.io/projected/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-kube-api-access-fbdp8\") pod \"community-operators-9qgx5\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.578787 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-catalog-content\") pod \"community-operators-9qgx5\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.578859 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-utilities\") pod \"community-operators-9qgx5\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.605640 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbdp8\" (UniqueName: \"kubernetes.io/projected/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-kube-api-access-fbdp8\") pod \"community-operators-9qgx5\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:56 crc kubenswrapper[4757]: I1006 13:51:56.732970 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:51:57 crc kubenswrapper[4757]: I1006 13:51:57.073872 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9qgx5"] Oct 06 13:51:57 crc kubenswrapper[4757]: I1006 13:51:57.545042 4757 generic.go:334] "Generic (PLEG): container finished" podID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerID="6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6" exitCode=0 Oct 06 13:51:57 crc kubenswrapper[4757]: I1006 13:51:57.545418 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9qgx5" event={"ID":"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0","Type":"ContainerDied","Data":"6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6"} Oct 06 13:51:57 crc kubenswrapper[4757]: I1006 13:51:57.545455 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9qgx5" event={"ID":"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0","Type":"ContainerStarted","Data":"6efc812bfe273f956530413a5bfc658444a8c0b2f48186658a1eca8002ed66ff"} Oct 06 13:51:59 crc kubenswrapper[4757]: I1006 13:51:59.559323 4757 generic.go:334] "Generic (PLEG): container finished" podID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerID="ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf" exitCode=0 Oct 06 13:51:59 crc kubenswrapper[4757]: I1006 13:51:59.559407 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9qgx5" event={"ID":"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0","Type":"ContainerDied","Data":"ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf"} Oct 06 13:52:00 crc kubenswrapper[4757]: I1006 13:52:00.571147 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9qgx5" event={"ID":"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0","Type":"ContainerStarted","Data":"8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70"} Oct 06 13:52:00 crc kubenswrapper[4757]: I1006 13:52:00.602005 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9qgx5" podStartSLOduration=2.068452527 podStartE2EDuration="4.601982645s" podCreationTimestamp="2025-10-06 13:51:56 +0000 UTC" firstStartedPulling="2025-10-06 13:51:57.547547288 +0000 UTC m=+806.044865825" lastFinishedPulling="2025-10-06 13:52:00.081077396 +0000 UTC m=+808.578395943" observedRunningTime="2025-10-06 13:52:00.599678802 +0000 UTC m=+809.096997349" watchObservedRunningTime="2025-10-06 13:52:00.601982645 +0000 UTC m=+809.099301182" Oct 06 13:52:00 crc kubenswrapper[4757]: I1006 13:52:00.934791 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-77db4df477-bh7dw" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.757749 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-q49jh"] Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.761205 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.766306 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.766467 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-l4t27" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.771724 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.782375 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl"] Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.783367 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.785377 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.797074 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl"] Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.866694 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-frr-conf\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.866737 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-frr-sockets\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.866766 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-metrics-certs\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.866787 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwnpf\" (UniqueName: \"kubernetes.io/projected/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-kube-api-access-dwnpf\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.866818 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-metrics\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.866837 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f74c411-c34f-47f6-aab7-3c80d0e9cbeb-cert\") pod \"frr-k8s-webhook-server-64bf5d555-f9gxl\" (UID: \"0f74c411-c34f-47f6-aab7-3c80d0e9cbeb\") " 
pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.866858 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-frr-startup\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.866886 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krjqd\" (UniqueName: \"kubernetes.io/projected/0f74c411-c34f-47f6-aab7-3c80d0e9cbeb-kube-api-access-krjqd\") pod \"frr-k8s-webhook-server-64bf5d555-f9gxl\" (UID: \"0f74c411-c34f-47f6-aab7-3c80d0e9cbeb\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.866902 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-reloader\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.906346 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-2cj4z"] Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.907857 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-2cj4z" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.913008 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-8sfhp"] Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.913914 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.914497 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.914673 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.914766 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.915069 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-tx7g6" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.915470 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.955509 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-8sfhp"] Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.967640 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-frr-sockets\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.967681 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-frr-conf\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.967714 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-metrics-certs\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.967735 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwnpf\" (UniqueName: \"kubernetes.io/projected/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-kube-api-access-dwnpf\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.967762 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-metrics\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.967778 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f74c411-c34f-47f6-aab7-3c80d0e9cbeb-cert\") pod \"frr-k8s-webhook-server-64bf5d555-f9gxl\" (UID: \"0f74c411-c34f-47f6-aab7-3c80d0e9cbeb\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.967796 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-frr-startup\") pod 
\"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.967823 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-reloader\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.967843 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krjqd\" (UniqueName: \"kubernetes.io/projected/0f74c411-c34f-47f6-aab7-3c80d0e9cbeb-kube-api-access-krjqd\") pod \"frr-k8s-webhook-server-64bf5d555-f9gxl\" (UID: \"0f74c411-c34f-47f6-aab7-3c80d0e9cbeb\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.968602 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-frr-sockets\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.968786 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-frr-conf\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.970468 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-metrics\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.970936 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-reloader\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.971132 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-frr-startup\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: E1006 13:52:01.971257 4757 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Oct 06 13:52:01 crc kubenswrapper[4757]: E1006 13:52:01.971295 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f74c411-c34f-47f6-aab7-3c80d0e9cbeb-cert podName:0f74c411-c34f-47f6-aab7-3c80d0e9cbeb nodeName:}" failed. No retries permitted until 2025-10-06 13:52:02.471284011 +0000 UTC m=+810.968602548 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0f74c411-c34f-47f6-aab7-3c80d0e9cbeb-cert") pod "frr-k8s-webhook-server-64bf5d555-f9gxl" (UID: "0f74c411-c34f-47f6-aab7-3c80d0e9cbeb") : secret "frr-k8s-webhook-server-cert" not found Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.991773 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-metrics-certs\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:01 crc kubenswrapper[4757]: I1006 13:52:01.996633 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwnpf\" (UniqueName: \"kubernetes.io/projected/07dfce6e-a6a9-44b4-a3c0-2f8778c6309e-kube-api-access-dwnpf\") pod \"frr-k8s-q49jh\" (UID: \"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e\") " pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.010288 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krjqd\" (UniqueName: \"kubernetes.io/projected/0f74c411-c34f-47f6-aab7-3c80d0e9cbeb-kube-api-access-krjqd\") pod \"frr-k8s-webhook-server-64bf5d555-f9gxl\" (UID: \"0f74c411-c34f-47f6-aab7-3c80d0e9cbeb\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.068950 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f17e2038-326e-47e1-93c1-5d691c69bd16-metallb-excludel2\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.069033 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-metrics-certs\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.069062 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq8sk\" (UniqueName: \"kubernetes.io/projected/f17e2038-326e-47e1-93c1-5d691c69bd16-kube-api-access-dq8sk\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.069105 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-memberlist\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.069126 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ad4c4b7-577f-4996-a45d-36026559eb4d-cert\") pod \"controller-68d546b9d8-8sfhp\" (UID: \"7ad4c4b7-577f-4996-a45d-36026559eb4d\") " pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.069147 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqbdq\" (UniqueName: 
\"kubernetes.io/projected/7ad4c4b7-577f-4996-a45d-36026559eb4d-kube-api-access-bqbdq\") pod \"controller-68d546b9d8-8sfhp\" (UID: \"7ad4c4b7-577f-4996-a45d-36026559eb4d\") " pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.069177 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ad4c4b7-577f-4996-a45d-36026559eb4d-metrics-certs\") pod \"controller-68d546b9d8-8sfhp\" (UID: \"7ad4c4b7-577f-4996-a45d-36026559eb4d\") " pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.081811 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.170677 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq8sk\" (UniqueName: \"kubernetes.io/projected/f17e2038-326e-47e1-93c1-5d691c69bd16-kube-api-access-dq8sk\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.170730 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-memberlist\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.170746 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ad4c4b7-577f-4996-a45d-36026559eb4d-cert\") pod \"controller-68d546b9d8-8sfhp\" (UID: \"7ad4c4b7-577f-4996-a45d-36026559eb4d\") " pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.170767 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqbdq\" (UniqueName: \"kubernetes.io/projected/7ad4c4b7-577f-4996-a45d-36026559eb4d-kube-api-access-bqbdq\") pod \"controller-68d546b9d8-8sfhp\" (UID: \"7ad4c4b7-577f-4996-a45d-36026559eb4d\") " pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.170798 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ad4c4b7-577f-4996-a45d-36026559eb4d-metrics-certs\") pod \"controller-68d546b9d8-8sfhp\" (UID: \"7ad4c4b7-577f-4996-a45d-36026559eb4d\") " pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.170822 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f17e2038-326e-47e1-93c1-5d691c69bd16-metallb-excludel2\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.170870 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-metrics-certs\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: E1006 13:52:02.170982 4757 secret.go:188] Couldn't get secret 
metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Oct 06 13:52:02 crc kubenswrapper[4757]: E1006 13:52:02.171026 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-metrics-certs podName:f17e2038-326e-47e1-93c1-5d691c69bd16 nodeName:}" failed. No retries permitted until 2025-10-06 13:52:02.671012573 +0000 UTC m=+811.168331110 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-metrics-certs") pod "speaker-2cj4z" (UID: "f17e2038-326e-47e1-93c1-5d691c69bd16") : secret "speaker-certs-secret" not found Oct 06 13:52:02 crc kubenswrapper[4757]: E1006 13:52:02.171436 4757 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 06 13:52:02 crc kubenswrapper[4757]: E1006 13:52:02.171514 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-memberlist podName:f17e2038-326e-47e1-93c1-5d691c69bd16 nodeName:}" failed. No retries permitted until 2025-10-06 13:52:02.671496779 +0000 UTC m=+811.168815316 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-memberlist") pod "speaker-2cj4z" (UID: "f17e2038-326e-47e1-93c1-5d691c69bd16") : secret "metallb-memberlist" not found Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.172058 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f17e2038-326e-47e1-93c1-5d691c69bd16-metallb-excludel2\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.175863 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7ad4c4b7-577f-4996-a45d-36026559eb4d-metrics-certs\") pod \"controller-68d546b9d8-8sfhp\" (UID: \"7ad4c4b7-577f-4996-a45d-36026559eb4d\") " pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.177641 4757 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.185158 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7ad4c4b7-577f-4996-a45d-36026559eb4d-cert\") pod \"controller-68d546b9d8-8sfhp\" (UID: \"7ad4c4b7-577f-4996-a45d-36026559eb4d\") " pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.188596 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq8sk\" (UniqueName: \"kubernetes.io/projected/f17e2038-326e-47e1-93c1-5d691c69bd16-kube-api-access-dq8sk\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.193320 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqbdq\" (UniqueName: \"kubernetes.io/projected/7ad4c4b7-577f-4996-a45d-36026559eb4d-kube-api-access-bqbdq\") pod \"controller-68d546b9d8-8sfhp\" (UID: \"7ad4c4b7-577f-4996-a45d-36026559eb4d\") " pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 
13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.237385 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.474987 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f74c411-c34f-47f6-aab7-3c80d0e9cbeb-cert\") pod \"frr-k8s-webhook-server-64bf5d555-f9gxl\" (UID: \"0f74c411-c34f-47f6-aab7-3c80d0e9cbeb\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.479751 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f74c411-c34f-47f6-aab7-3c80d0e9cbeb-cert\") pod \"frr-k8s-webhook-server-64bf5d555-f9gxl\" (UID: \"0f74c411-c34f-47f6-aab7-3c80d0e9cbeb\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.585423 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerStarted","Data":"7746b915e50c7e7ef922a50bb18c6177e6d87ac690e4b297587291085c0b7ae0"} Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.653158 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-8sfhp"] Oct 06 13:52:02 crc kubenswrapper[4757]: W1006 13:52:02.667640 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ad4c4b7_577f_4996_a45d_36026559eb4d.slice/crio-aaf0f1353546d26b44f42cf439647d33b81080abaf4b7ca9858d4d33f4df3a29 WatchSource:0}: Error finding container aaf0f1353546d26b44f42cf439647d33b81080abaf4b7ca9858d4d33f4df3a29: Status 404 returned error can't find the container with id aaf0f1353546d26b44f42cf439647d33b81080abaf4b7ca9858d4d33f4df3a29 Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.681774 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-metrics-certs\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.681872 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-memberlist\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: E1006 13:52:02.682049 4757 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 06 13:52:02 crc kubenswrapper[4757]: E1006 13:52:02.682173 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-memberlist podName:f17e2038-326e-47e1-93c1-5d691c69bd16 nodeName:}" failed. No retries permitted until 2025-10-06 13:52:03.682149392 +0000 UTC m=+812.179467969 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-memberlist") pod "speaker-2cj4z" (UID: "f17e2038-326e-47e1-93c1-5d691c69bd16") : secret "metallb-memberlist" not found Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.687405 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-metrics-certs\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.703587 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.792785 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gz6s7"] Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.793892 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.806907 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gz6s7"] Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.889875 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-utilities\") pod \"certified-operators-gz6s7\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.889921 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-catalog-content\") pod \"certified-operators-gz6s7\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.890021 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn65d\" (UniqueName: \"kubernetes.io/projected/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-kube-api-access-qn65d\") pod \"certified-operators-gz6s7\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.991215 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn65d\" (UniqueName: \"kubernetes.io/projected/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-kube-api-access-qn65d\") pod \"certified-operators-gz6s7\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.991509 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-utilities\") pod \"certified-operators-gz6s7\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.991537 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-catalog-content\") pod \"certified-operators-gz6s7\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.991965 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-catalog-content\") pod \"certified-operators-gz6s7\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:02 crc kubenswrapper[4757]: I1006 13:52:02.992023 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-utilities\") pod \"certified-operators-gz6s7\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.010780 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn65d\" (UniqueName: \"kubernetes.io/projected/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-kube-api-access-qn65d\") pod \"certified-operators-gz6s7\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.132885 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.168724 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl"] Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.592001 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-8sfhp" event={"ID":"7ad4c4b7-577f-4996-a45d-36026559eb4d","Type":"ContainerStarted","Data":"1b542cb58b595085e05749d6091281c6fc70647fd6e217c830644ca34013c09c"} Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.592394 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-8sfhp" event={"ID":"7ad4c4b7-577f-4996-a45d-36026559eb4d","Type":"ContainerStarted","Data":"43c473dbb14c744f47cf1847f527378e0bb9f150c08813ad8c04b0f8c4a2d7dc"} Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.592404 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-8sfhp" event={"ID":"7ad4c4b7-577f-4996-a45d-36026559eb4d","Type":"ContainerStarted","Data":"aaf0f1353546d26b44f42cf439647d33b81080abaf4b7ca9858d4d33f4df3a29"} Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.592416 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.593375 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" event={"ID":"0f74c411-c34f-47f6-aab7-3c80d0e9cbeb","Type":"ContainerStarted","Data":"4cdbc9ff936eb57fabfc417e4e6a8ab371f303618c2c4e1b0555f2c6d60a3f28"} Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.616866 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-8sfhp" podStartSLOduration=2.616851423 podStartE2EDuration="2.616851423s" podCreationTimestamp="2025-10-06 13:52:01 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:52:03.613453735 +0000 UTC m=+812.110772272" watchObservedRunningTime="2025-10-06 13:52:03.616851423 +0000 UTC m=+812.114169960" Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.629256 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gz6s7"] Oct 06 13:52:03 crc kubenswrapper[4757]: W1006 13:52:03.646067 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14a98b27_3a97_42c2_bd77_e23eec5c8ecc.slice/crio-9f4c1cee1b07db7f2b75eb4606889a35c9d1a3d0d254fd73bdebd281f5bdd653 WatchSource:0}: Error finding container 9f4c1cee1b07db7f2b75eb4606889a35c9d1a3d0d254fd73bdebd281f5bdd653: Status 404 returned error can't find the container with id 9f4c1cee1b07db7f2b75eb4606889a35c9d1a3d0d254fd73bdebd281f5bdd653 Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.700968 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-memberlist\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.709167 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f17e2038-326e-47e1-93c1-5d691c69bd16-memberlist\") pod \"speaker-2cj4z\" (UID: \"f17e2038-326e-47e1-93c1-5d691c69bd16\") " pod="metallb-system/speaker-2cj4z" Oct 06 13:52:03 crc kubenswrapper[4757]: I1006 13:52:03.727185 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-2cj4z" Oct 06 13:52:03 crc kubenswrapper[4757]: W1006 13:52:03.752506 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf17e2038_326e_47e1_93c1_5d691c69bd16.slice/crio-46cc0d3920d479b8412a17dcd539039df84fbbee8543f985a1dd50818e3061c8 WatchSource:0}: Error finding container 46cc0d3920d479b8412a17dcd539039df84fbbee8543f985a1dd50818e3061c8: Status 404 returned error can't find the container with id 46cc0d3920d479b8412a17dcd539039df84fbbee8543f985a1dd50818e3061c8 Oct 06 13:52:04 crc kubenswrapper[4757]: I1006 13:52:04.606070 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-2cj4z" event={"ID":"f17e2038-326e-47e1-93c1-5d691c69bd16","Type":"ContainerStarted","Data":"cbdbdd73842a653f0979667162f62bc1c3dbee5f6be491754ee47de1886f16a7"} Oct 06 13:52:04 crc kubenswrapper[4757]: I1006 13:52:04.606518 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-2cj4z" event={"ID":"f17e2038-326e-47e1-93c1-5d691c69bd16","Type":"ContainerStarted","Data":"46cc0d3920d479b8412a17dcd539039df84fbbee8543f985a1dd50818e3061c8"} Oct 06 13:52:04 crc kubenswrapper[4757]: I1006 13:52:04.612151 4757 generic.go:334] "Generic (PLEG): container finished" podID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerID="a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054" exitCode=0 Oct 06 13:52:04 crc kubenswrapper[4757]: I1006 13:52:04.612257 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz6s7" event={"ID":"14a98b27-3a97-42c2-bd77-e23eec5c8ecc","Type":"ContainerDied","Data":"a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054"} Oct 06 13:52:04 crc kubenswrapper[4757]: I1006 13:52:04.612284 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz6s7" event={"ID":"14a98b27-3a97-42c2-bd77-e23eec5c8ecc","Type":"ContainerStarted","Data":"9f4c1cee1b07db7f2b75eb4606889a35c9d1a3d0d254fd73bdebd281f5bdd653"} Oct 06 13:52:05 crc kubenswrapper[4757]: I1006 13:52:05.626696 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-2cj4z" event={"ID":"f17e2038-326e-47e1-93c1-5d691c69bd16","Type":"ContainerStarted","Data":"dec8a11d7d8adaa4a2218406a87f84be9f431a8209e877fc3ff46bc64ee69356"} Oct 06 13:52:05 crc kubenswrapper[4757]: I1006 13:52:05.628265 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-2cj4z" Oct 06 13:52:05 crc kubenswrapper[4757]: I1006 13:52:05.649046 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-2cj4z" podStartSLOduration=4.649026164 podStartE2EDuration="4.649026164s" podCreationTimestamp="2025-10-06 13:52:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:52:05.643520119 +0000 UTC m=+814.140838666" watchObservedRunningTime="2025-10-06 13:52:05.649026164 +0000 UTC m=+814.146344721" Oct 06 13:52:06 crc kubenswrapper[4757]: I1006 13:52:06.633894 4757 generic.go:334] "Generic (PLEG): container finished" podID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerID="2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc" exitCode=0 Oct 06 13:52:06 crc kubenswrapper[4757]: I1006 13:52:06.634001 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-gz6s7" event={"ID":"14a98b27-3a97-42c2-bd77-e23eec5c8ecc","Type":"ContainerDied","Data":"2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc"} Oct 06 13:52:06 crc kubenswrapper[4757]: I1006 13:52:06.733081 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:52:06 crc kubenswrapper[4757]: I1006 13:52:06.733344 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:52:06 crc kubenswrapper[4757]: I1006 13:52:06.788544 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:52:07 crc kubenswrapper[4757]: I1006 13:52:07.694282 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:52:08 crc kubenswrapper[4757]: I1006 13:52:08.967222 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9qgx5"] Oct 06 13:52:10 crc kubenswrapper[4757]: I1006 13:52:10.670752 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" event={"ID":"0f74c411-c34f-47f6-aab7-3c80d0e9cbeb","Type":"ContainerStarted","Data":"41f5f5a2c02e73a42edc17c03b9cd95826f3c82a6d577423d8beb8d4cedc1955"} Oct 06 13:52:10 crc kubenswrapper[4757]: I1006 13:52:10.674186 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:10 crc kubenswrapper[4757]: I1006 13:52:10.682676 4757 generic.go:334] "Generic (PLEG): container finished" podID="07dfce6e-a6a9-44b4-a3c0-2f8778c6309e" containerID="1d4caebaa80200ab14a94c6c44ddebfd30d57ed20a7ccf84a6523c83f8e24e64" exitCode=0 Oct 06 13:52:10 crc kubenswrapper[4757]: I1006 13:52:10.683321 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9qgx5" podUID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerName="registry-server" containerID="cri-o://8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70" gracePeriod=2 Oct 06 13:52:10 crc kubenswrapper[4757]: I1006 13:52:10.689316 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz6s7" event={"ID":"14a98b27-3a97-42c2-bd77-e23eec5c8ecc","Type":"ContainerStarted","Data":"3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe"} Oct 06 13:52:10 crc kubenswrapper[4757]: I1006 13:52:10.689435 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerDied","Data":"1d4caebaa80200ab14a94c6c44ddebfd30d57ed20a7ccf84a6523c83f8e24e64"} Oct 06 13:52:10 crc kubenswrapper[4757]: I1006 13:52:10.702895 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" podStartSLOduration=2.972489416 podStartE2EDuration="9.702879859s" podCreationTimestamp="2025-10-06 13:52:01 +0000 UTC" firstStartedPulling="2025-10-06 13:52:03.19358347 +0000 UTC m=+811.690902007" lastFinishedPulling="2025-10-06 13:52:09.923973913 +0000 UTC m=+818.421292450" observedRunningTime="2025-10-06 13:52:10.700897785 +0000 UTC m=+819.198216332" watchObservedRunningTime="2025-10-06 13:52:10.702879859 +0000 UTC m=+819.200198396" Oct 06 
13:52:10 crc kubenswrapper[4757]: I1006 13:52:10.770591 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gz6s7" podStartSLOduration=3.593520548 podStartE2EDuration="8.770570902s" podCreationTimestamp="2025-10-06 13:52:02 +0000 UTC" firstStartedPulling="2025-10-06 13:52:04.613524936 +0000 UTC m=+813.110843463" lastFinishedPulling="2025-10-06 13:52:09.79057528 +0000 UTC m=+818.287893817" observedRunningTime="2025-10-06 13:52:10.765727658 +0000 UTC m=+819.263046205" watchObservedRunningTime="2025-10-06 13:52:10.770570902 +0000 UTC m=+819.267889459" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.060120 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.228575 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-catalog-content\") pod \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.228684 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbdp8\" (UniqueName: \"kubernetes.io/projected/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-kube-api-access-fbdp8\") pod \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.228791 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-utilities\") pod \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\" (UID: \"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0\") " Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.229918 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-utilities" (OuterVolumeSpecName: "utilities") pod "ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" (UID: "ae2b2b3a-6062-48ab-9a7a-08a465fae4e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.244324 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-kube-api-access-fbdp8" (OuterVolumeSpecName: "kube-api-access-fbdp8") pod "ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" (UID: "ae2b2b3a-6062-48ab-9a7a-08a465fae4e0"). InnerVolumeSpecName "kube-api-access-fbdp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.280178 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" (UID: "ae2b2b3a-6062-48ab-9a7a-08a465fae4e0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.331846 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.331898 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbdp8\" (UniqueName: \"kubernetes.io/projected/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-kube-api-access-fbdp8\") on node \"crc\" DevicePath \"\"" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.331911 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.691998 4757 generic.go:334] "Generic (PLEG): container finished" podID="07dfce6e-a6a9-44b4-a3c0-2f8778c6309e" containerID="ff9b07a8b3b87201f758b12e7e0b9fbad8aeda95b10bd68ba2a3e0766cbea4e0" exitCode=0 Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.692215 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerDied","Data":"ff9b07a8b3b87201f758b12e7e0b9fbad8aeda95b10bd68ba2a3e0766cbea4e0"} Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.696175 4757 generic.go:334] "Generic (PLEG): container finished" podID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerID="8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70" exitCode=0 Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.696249 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9qgx5" event={"ID":"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0","Type":"ContainerDied","Data":"8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70"} Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.696277 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9qgx5" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.696300 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9qgx5" event={"ID":"ae2b2b3a-6062-48ab-9a7a-08a465fae4e0","Type":"ContainerDied","Data":"6efc812bfe273f956530413a5bfc658444a8c0b2f48186658a1eca8002ed66ff"} Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.696339 4757 scope.go:117] "RemoveContainer" containerID="8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.726703 4757 scope.go:117] "RemoveContainer" containerID="ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.756508 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9qgx5"] Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.762357 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9qgx5"] Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.830884 4757 scope.go:117] "RemoveContainer" containerID="6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.850704 4757 scope.go:117] "RemoveContainer" containerID="8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70" Oct 06 13:52:11 crc kubenswrapper[4757]: E1006 13:52:11.851162 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70\": container with ID starting with 8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70 not found: ID does not exist" containerID="8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.851198 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70"} err="failed to get container status \"8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70\": rpc error: code = NotFound desc = could not find container \"8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70\": container with ID starting with 8a22a43d5144257a65f7d37470ca4dba38fb0a8fff562fcd92e1265e755e9c70 not found: ID does not exist" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.851221 4757 scope.go:117] "RemoveContainer" containerID="ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf" Oct 06 13:52:11 crc kubenswrapper[4757]: E1006 13:52:11.851580 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf\": container with ID starting with ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf not found: ID does not exist" containerID="ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.851602 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf"} err="failed to get container status \"ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf\": rpc error: code = NotFound desc = could not find 
container \"ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf\": container with ID starting with ed7d2ada70f005c5bfbb68e4aeec3ff4880dacdefd9c12c37bf3a58e40d947cf not found: ID does not exist" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.851615 4757 scope.go:117] "RemoveContainer" containerID="6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6" Oct 06 13:52:11 crc kubenswrapper[4757]: E1006 13:52:11.851863 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6\": container with ID starting with 6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6 not found: ID does not exist" containerID="6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6" Oct 06 13:52:11 crc kubenswrapper[4757]: I1006 13:52:11.851910 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6"} err="failed to get container status \"6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6\": rpc error: code = NotFound desc = could not find container \"6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6\": container with ID starting with 6ee77199ec24a5833f6276b32ec1edabec619689ec7e50188cc3c64067a4dcc6 not found: ID does not exist" Oct 06 13:52:12 crc kubenswrapper[4757]: I1006 13:52:12.189343 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" path="/var/lib/kubelet/pods/ae2b2b3a-6062-48ab-9a7a-08a465fae4e0/volumes" Oct 06 13:52:12 crc kubenswrapper[4757]: I1006 13:52:12.242255 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-8sfhp" Oct 06 13:52:12 crc kubenswrapper[4757]: I1006 13:52:12.703843 4757 generic.go:334] "Generic (PLEG): container finished" podID="07dfce6e-a6a9-44b4-a3c0-2f8778c6309e" containerID="54c9ea1026b96f165756108c0d37ed3feaeed8e84822de951ac1388cb4e98df7" exitCode=0 Oct 06 13:52:12 crc kubenswrapper[4757]: I1006 13:52:12.703904 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerDied","Data":"54c9ea1026b96f165756108c0d37ed3feaeed8e84822de951ac1388cb4e98df7"} Oct 06 13:52:13 crc kubenswrapper[4757]: I1006 13:52:13.133427 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:13 crc kubenswrapper[4757]: I1006 13:52:13.134420 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:13 crc kubenswrapper[4757]: I1006 13:52:13.188950 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:13 crc kubenswrapper[4757]: I1006 13:52:13.714781 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerStarted","Data":"668f6b97a47bfbea8a98ab0fe5674c5e0ffafaa1abacc1ecd5b3dd8ab460c610"} Oct 06 13:52:13 crc kubenswrapper[4757]: I1006 13:52:13.714821 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" 
event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerStarted","Data":"5a49c5515d896dcd71dec14e085d139cc5cd95ee61c92c1c5b5873afd2af0591"} Oct 06 13:52:13 crc kubenswrapper[4757]: I1006 13:52:13.714833 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerStarted","Data":"9f43be0765658bf62b60ec2d393af2ef87f1a116e3013c29a4df5adf39423901"} Oct 06 13:52:13 crc kubenswrapper[4757]: I1006 13:52:13.714842 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerStarted","Data":"782987b76c5b1b35bbce5cea43f5de8e97656dcbb5a4ff4ec0d8994670e4d57e"} Oct 06 13:52:13 crc kubenswrapper[4757]: I1006 13:52:13.714851 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerStarted","Data":"3d016c625a7cc79d76aa41cc6d0486624e6762dbee5a1de5d1b61a0ffd781971"} Oct 06 13:52:14 crc kubenswrapper[4757]: I1006 13:52:14.725795 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-q49jh" event={"ID":"07dfce6e-a6a9-44b4-a3c0-2f8778c6309e","Type":"ContainerStarted","Data":"510257ec0a9f6ca46a58345db7c722467d88c78ef47081f8f9c683ff400fe902"} Oct 06 13:52:14 crc kubenswrapper[4757]: I1006 13:52:14.727683 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:14 crc kubenswrapper[4757]: I1006 13:52:14.753219 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-q49jh" podStartSLOduration=6.108414574 podStartE2EDuration="13.753187523s" podCreationTimestamp="2025-10-06 13:52:01 +0000 UTC" firstStartedPulling="2025-10-06 13:52:02.253408544 +0000 UTC m=+810.750727071" lastFinishedPulling="2025-10-06 13:52:09.898181483 +0000 UTC m=+818.395500020" observedRunningTime="2025-10-06 13:52:14.750539589 +0000 UTC m=+823.247858146" watchObservedRunningTime="2025-10-06 13:52:14.753187523 +0000 UTC m=+823.250506100" Oct 06 13:52:14 crc kubenswrapper[4757]: I1006 13:52:14.801011 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:15 crc kubenswrapper[4757]: I1006 13:52:15.367277 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gz6s7"] Oct 06 13:52:16 crc kubenswrapper[4757]: I1006 13:52:16.740329 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gz6s7" podUID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerName="registry-server" containerID="cri-o://3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe" gracePeriod=2 Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.082695 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.152036 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.717629 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.772050 4757 generic.go:334] "Generic (PLEG): container finished" podID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerID="3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe" exitCode=0 Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.772297 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz6s7" event={"ID":"14a98b27-3a97-42c2-bd77-e23eec5c8ecc","Type":"ContainerDied","Data":"3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe"} Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.772336 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz6s7" event={"ID":"14a98b27-3a97-42c2-bd77-e23eec5c8ecc","Type":"ContainerDied","Data":"9f4c1cee1b07db7f2b75eb4606889a35c9d1a3d0d254fd73bdebd281f5bdd653"} Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.772353 4757 scope.go:117] "RemoveContainer" containerID="3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.773224 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gz6s7" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.800489 4757 scope.go:117] "RemoveContainer" containerID="2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.819841 4757 scope.go:117] "RemoveContainer" containerID="a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.834224 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qn65d\" (UniqueName: \"kubernetes.io/projected/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-kube-api-access-qn65d\") pod \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.834361 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-catalog-content\") pod \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.834463 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-utilities\") pod \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\" (UID: \"14a98b27-3a97-42c2-bd77-e23eec5c8ecc\") " Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.835358 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-utilities" (OuterVolumeSpecName: "utilities") pod "14a98b27-3a97-42c2-bd77-e23eec5c8ecc" (UID: "14a98b27-3a97-42c2-bd77-e23eec5c8ecc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.835515 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.842338 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-kube-api-access-qn65d" (OuterVolumeSpecName: "kube-api-access-qn65d") pod "14a98b27-3a97-42c2-bd77-e23eec5c8ecc" (UID: "14a98b27-3a97-42c2-bd77-e23eec5c8ecc"). InnerVolumeSpecName "kube-api-access-qn65d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.848114 4757 scope.go:117] "RemoveContainer" containerID="3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe" Oct 06 13:52:17 crc kubenswrapper[4757]: E1006 13:52:17.848552 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe\": container with ID starting with 3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe not found: ID does not exist" containerID="3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.848599 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe"} err="failed to get container status \"3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe\": rpc error: code = NotFound desc = could not find container \"3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe\": container with ID starting with 3d99684d83b316383bcf734aa8043e1d011bc8f3427de26edc6cfb08244b5abe not found: ID does not exist" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.848625 4757 scope.go:117] "RemoveContainer" containerID="2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc" Oct 06 13:52:17 crc kubenswrapper[4757]: E1006 13:52:17.848951 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc\": container with ID starting with 2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc not found: ID does not exist" containerID="2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.848975 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc"} err="failed to get container status \"2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc\": rpc error: code = NotFound desc = could not find container \"2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc\": container with ID starting with 2a7231a65fc3a6f865634cc46081df96af84e001bc57b61668c9a1ac2ff826bc not found: ID does not exist" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.848988 4757 scope.go:117] "RemoveContainer" containerID="a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054" Oct 06 13:52:17 crc kubenswrapper[4757]: E1006 13:52:17.849458 4757 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054\": container with ID starting with a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054 not found: ID does not exist" containerID="a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.849500 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054"} err="failed to get container status \"a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054\": rpc error: code = NotFound desc = could not find container \"a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054\": container with ID starting with a52eac5aa0b0e96898a16a978e87ef22fcfa9a90f5c939f24b934558ecf4f054 not found: ID does not exist" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.877531 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14a98b27-3a97-42c2-bd77-e23eec5c8ecc" (UID: "14a98b27-3a97-42c2-bd77-e23eec5c8ecc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.936913 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qn65d\" (UniqueName: \"kubernetes.io/projected/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-kube-api-access-qn65d\") on node \"crc\" DevicePath \"\"" Oct 06 13:52:17 crc kubenswrapper[4757]: I1006 13:52:17.936984 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a98b27-3a97-42c2-bd77-e23eec5c8ecc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 13:52:18 crc kubenswrapper[4757]: I1006 13:52:18.108683 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gz6s7"] Oct 06 13:52:18 crc kubenswrapper[4757]: I1006 13:52:18.112699 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gz6s7"] Oct 06 13:52:18 crc kubenswrapper[4757]: I1006 13:52:18.186637 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" path="/var/lib/kubelet/pods/14a98b27-3a97-42c2-bd77-e23eec5c8ecc/volumes" Oct 06 13:52:22 crc kubenswrapper[4757]: I1006 13:52:22.085632 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-q49jh" Oct 06 13:52:22 crc kubenswrapper[4757]: I1006 13:52:22.711472 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-f9gxl" Oct 06 13:52:23 crc kubenswrapper[4757]: I1006 13:52:23.733520 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-2cj4z" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.322780 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp"] Oct 06 13:52:25 crc kubenswrapper[4757]: E1006 13:52:25.323530 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerName="extract-content" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.323545 4757 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerName="extract-content" Oct 06 13:52:25 crc kubenswrapper[4757]: E1006 13:52:25.323564 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerName="extract-utilities" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.323572 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerName="extract-utilities" Oct 06 13:52:25 crc kubenswrapper[4757]: E1006 13:52:25.323582 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerName="registry-server" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.323591 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerName="registry-server" Oct 06 13:52:25 crc kubenswrapper[4757]: E1006 13:52:25.323612 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerName="extract-utilities" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.323619 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerName="extract-utilities" Oct 06 13:52:25 crc kubenswrapper[4757]: E1006 13:52:25.323638 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerName="registry-server" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.323645 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerName="registry-server" Oct 06 13:52:25 crc kubenswrapper[4757]: E1006 13:52:25.323652 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerName="extract-content" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.323658 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerName="extract-content" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.323864 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a98b27-3a97-42c2-bd77-e23eec5c8ecc" containerName="registry-server" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.323886 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae2b2b3a-6062-48ab-9a7a-08a465fae4e0" containerName="registry-server" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.325421 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.330502 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.339870 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp"] Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.438001 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pv76\" (UniqueName: \"kubernetes.io/projected/e7df9b75-6121-46ca-89f6-963c9665b8cf-kube-api-access-4pv76\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.438059 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.438118 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.539604 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.539688 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pv76\" (UniqueName: \"kubernetes.io/projected/e7df9b75-6121-46ca-89f6-963c9665b8cf-kube-api-access-4pv76\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.539716 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.540178 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.540386 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.562419 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pv76\" (UniqueName: \"kubernetes.io/projected/e7df9b75-6121-46ca-89f6-963c9665b8cf-kube-api-access-4pv76\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:25 crc kubenswrapper[4757]: I1006 13:52:25.697199 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:26 crc kubenswrapper[4757]: I1006 13:52:26.134843 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp"] Oct 06 13:52:26 crc kubenswrapper[4757]: W1006 13:52:26.142340 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7df9b75_6121_46ca_89f6_963c9665b8cf.slice/crio-7b37b87159eeeb15694b8c656701a01e6770ee8ecdf975a55d2004769a0fe1bf WatchSource:0}: Error finding container 7b37b87159eeeb15694b8c656701a01e6770ee8ecdf975a55d2004769a0fe1bf: Status 404 returned error can't find the container with id 7b37b87159eeeb15694b8c656701a01e6770ee8ecdf975a55d2004769a0fe1bf Oct 06 13:52:26 crc kubenswrapper[4757]: I1006 13:52:26.838535 4757 generic.go:334] "Generic (PLEG): container finished" podID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerID="06e80559dc1cc8d588c6dcba1a0213b8955e348b29ed369642b13d2be6ebf4aa" exitCode=0 Oct 06 13:52:26 crc kubenswrapper[4757]: I1006 13:52:26.838606 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" event={"ID":"e7df9b75-6121-46ca-89f6-963c9665b8cf","Type":"ContainerDied","Data":"06e80559dc1cc8d588c6dcba1a0213b8955e348b29ed369642b13d2be6ebf4aa"} Oct 06 13:52:26 crc kubenswrapper[4757]: I1006 13:52:26.838937 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" event={"ID":"e7df9b75-6121-46ca-89f6-963c9665b8cf","Type":"ContainerStarted","Data":"7b37b87159eeeb15694b8c656701a01e6770ee8ecdf975a55d2004769a0fe1bf"} Oct 06 13:52:29 crc kubenswrapper[4757]: I1006 13:52:29.858687 4757 generic.go:334] "Generic (PLEG): container finished" podID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerID="ec30bf2d714b8bcdd837304b48585620b2ff69df695e8dc69bd2e269946f2b61" exitCode=0 Oct 06 13:52:29 crc kubenswrapper[4757]: I1006 13:52:29.858821 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" event={"ID":"e7df9b75-6121-46ca-89f6-963c9665b8cf","Type":"ContainerDied","Data":"ec30bf2d714b8bcdd837304b48585620b2ff69df695e8dc69bd2e269946f2b61"} Oct 06 13:52:30 crc kubenswrapper[4757]: I1006 13:52:30.868414 4757 generic.go:334] "Generic (PLEG): container finished" podID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerID="da2e89162ae32afeee6a816c114881b8e237ee4ad4c696d9c6ffb0cc44fcf72b" exitCode=0 Oct 06 13:52:30 crc kubenswrapper[4757]: I1006 13:52:30.868506 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" event={"ID":"e7df9b75-6121-46ca-89f6-963c9665b8cf","Type":"ContainerDied","Data":"da2e89162ae32afeee6a816c114881b8e237ee4ad4c696d9c6ffb0cc44fcf72b"} Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.191354 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.342112 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pv76\" (UniqueName: \"kubernetes.io/projected/e7df9b75-6121-46ca-89f6-963c9665b8cf-kube-api-access-4pv76\") pod \"e7df9b75-6121-46ca-89f6-963c9665b8cf\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.342213 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-bundle\") pod \"e7df9b75-6121-46ca-89f6-963c9665b8cf\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.342271 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-util\") pod \"e7df9b75-6121-46ca-89f6-963c9665b8cf\" (UID: \"e7df9b75-6121-46ca-89f6-963c9665b8cf\") " Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.343754 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-bundle" (OuterVolumeSpecName: "bundle") pod "e7df9b75-6121-46ca-89f6-963c9665b8cf" (UID: "e7df9b75-6121-46ca-89f6-963c9665b8cf"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.350047 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7df9b75-6121-46ca-89f6-963c9665b8cf-kube-api-access-4pv76" (OuterVolumeSpecName: "kube-api-access-4pv76") pod "e7df9b75-6121-46ca-89f6-963c9665b8cf" (UID: "e7df9b75-6121-46ca-89f6-963c9665b8cf"). InnerVolumeSpecName "kube-api-access-4pv76". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.354852 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-util" (OuterVolumeSpecName: "util") pod "e7df9b75-6121-46ca-89f6-963c9665b8cf" (UID: "e7df9b75-6121-46ca-89f6-963c9665b8cf"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.444588 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pv76\" (UniqueName: \"kubernetes.io/projected/e7df9b75-6121-46ca-89f6-963c9665b8cf-kube-api-access-4pv76\") on node \"crc\" DevicePath \"\"" Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.444619 4757 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.444631 4757 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7df9b75-6121-46ca-89f6-963c9665b8cf-util\") on node \"crc\" DevicePath \"\"" Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.887017 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" event={"ID":"e7df9b75-6121-46ca-89f6-963c9665b8cf","Type":"ContainerDied","Data":"7b37b87159eeeb15694b8c656701a01e6770ee8ecdf975a55d2004769a0fe1bf"} Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.887305 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b37b87159eeeb15694b8c656701a01e6770ee8ecdf975a55d2004769a0fe1bf" Oct 06 13:52:32 crc kubenswrapper[4757]: I1006 13:52:32.887363 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.821898 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt"] Oct 06 13:52:35 crc kubenswrapper[4757]: E1006 13:52:35.822450 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerName="pull" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.822464 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerName="pull" Oct 06 13:52:35 crc kubenswrapper[4757]: E1006 13:52:35.822479 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerName="util" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.822488 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerName="util" Oct 06 13:52:35 crc kubenswrapper[4757]: E1006 13:52:35.822500 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerName="extract" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.822509 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerName="extract" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.822664 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7df9b75-6121-46ca-89f6-963c9665b8cf" containerName="extract" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.823136 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.825336 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.825727 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.827533 4757 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-gdv82" Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.840362 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt"] Oct 06 13:52:35 crc kubenswrapper[4757]: I1006 13:52:35.989901 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zf2w\" (UniqueName: \"kubernetes.io/projected/e4cb64f6-b956-4917-9243-0dc77ced4ecb-kube-api-access-5zf2w\") pod \"cert-manager-operator-controller-manager-57cd46d6d-f4dkt\" (UID: \"e4cb64f6-b956-4917-9243-0dc77ced4ecb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt" Oct 06 13:52:36 crc kubenswrapper[4757]: I1006 13:52:36.091587 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zf2w\" (UniqueName: \"kubernetes.io/projected/e4cb64f6-b956-4917-9243-0dc77ced4ecb-kube-api-access-5zf2w\") pod \"cert-manager-operator-controller-manager-57cd46d6d-f4dkt\" (UID: \"e4cb64f6-b956-4917-9243-0dc77ced4ecb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt" Oct 06 13:52:36 crc kubenswrapper[4757]: I1006 13:52:36.117329 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zf2w\" (UniqueName: \"kubernetes.io/projected/e4cb64f6-b956-4917-9243-0dc77ced4ecb-kube-api-access-5zf2w\") pod \"cert-manager-operator-controller-manager-57cd46d6d-f4dkt\" (UID: \"e4cb64f6-b956-4917-9243-0dc77ced4ecb\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt" Oct 06 13:52:36 crc kubenswrapper[4757]: I1006 13:52:36.140349 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt" Oct 06 13:52:36 crc kubenswrapper[4757]: I1006 13:52:36.635033 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt"] Oct 06 13:52:36 crc kubenswrapper[4757]: W1006 13:52:36.643681 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4cb64f6_b956_4917_9243_0dc77ced4ecb.slice/crio-680a3bf7f5c5621deb930d179342707b658d8abd7c37b85d48b858c072998d11 WatchSource:0}: Error finding container 680a3bf7f5c5621deb930d179342707b658d8abd7c37b85d48b858c072998d11: Status 404 returned error can't find the container with id 680a3bf7f5c5621deb930d179342707b658d8abd7c37b85d48b858c072998d11 Oct 06 13:52:36 crc kubenswrapper[4757]: I1006 13:52:36.914492 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt" event={"ID":"e4cb64f6-b956-4917-9243-0dc77ced4ecb","Type":"ContainerStarted","Data":"680a3bf7f5c5621deb930d179342707b658d8abd7c37b85d48b858c072998d11"} Oct 06 13:52:43 crc kubenswrapper[4757]: I1006 13:52:43.970229 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt" event={"ID":"e4cb64f6-b956-4917-9243-0dc77ced4ecb","Type":"ContainerStarted","Data":"00492878faf28b501a79a95bfe8abaed374c86f89ec615eb151c5d97072466ca"} Oct 06 13:52:44 crc kubenswrapper[4757]: I1006 13:52:44.004914 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-f4dkt" podStartSLOduration=2.794953714 podStartE2EDuration="9.004883523s" podCreationTimestamp="2025-10-06 13:52:35 +0000 UTC" firstStartedPulling="2025-10-06 13:52:36.64660053 +0000 UTC m=+845.143919077" lastFinishedPulling="2025-10-06 13:52:42.856530309 +0000 UTC m=+851.353848886" observedRunningTime="2025-10-06 13:52:43.998375205 +0000 UTC m=+852.495693812" watchObservedRunningTime="2025-10-06 13:52:44.004883523 +0000 UTC m=+852.502202100" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.523599 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-xx877"] Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.524949 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.527270 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.527610 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.533729 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-xx877"] Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.568614 4757 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-6k2hb" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.671444 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f261a0cf-2cfa-452a-9366-430ae1d09fac-bound-sa-token\") pod \"cert-manager-webhook-d969966f-xx877\" (UID: \"f261a0cf-2cfa-452a-9366-430ae1d09fac\") " pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.671810 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh9tz\" (UniqueName: \"kubernetes.io/projected/f261a0cf-2cfa-452a-9366-430ae1d09fac-kube-api-access-wh9tz\") pod \"cert-manager-webhook-d969966f-xx877\" (UID: \"f261a0cf-2cfa-452a-9366-430ae1d09fac\") " pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.773462 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f261a0cf-2cfa-452a-9366-430ae1d09fac-bound-sa-token\") pod \"cert-manager-webhook-d969966f-xx877\" (UID: \"f261a0cf-2cfa-452a-9366-430ae1d09fac\") " pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.773811 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh9tz\" (UniqueName: \"kubernetes.io/projected/f261a0cf-2cfa-452a-9366-430ae1d09fac-kube-api-access-wh9tz\") pod \"cert-manager-webhook-d969966f-xx877\" (UID: \"f261a0cf-2cfa-452a-9366-430ae1d09fac\") " pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.794214 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f261a0cf-2cfa-452a-9366-430ae1d09fac-bound-sa-token\") pod \"cert-manager-webhook-d969966f-xx877\" (UID: \"f261a0cf-2cfa-452a-9366-430ae1d09fac\") " pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.795048 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh9tz\" (UniqueName: \"kubernetes.io/projected/f261a0cf-2cfa-452a-9366-430ae1d09fac-kube-api-access-wh9tz\") pod \"cert-manager-webhook-d969966f-xx877\" (UID: \"f261a0cf-2cfa-452a-9366-430ae1d09fac\") " pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:46 crc kubenswrapper[4757]: I1006 13:52:46.876399 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:47 crc kubenswrapper[4757]: I1006 13:52:47.315989 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-xx877"] Oct 06 13:52:47 crc kubenswrapper[4757]: W1006 13:52:47.325154 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf261a0cf_2cfa_452a_9366_430ae1d09fac.slice/crio-16771258b7fc77b812be2a024d3e8dd2da1dd4978f31bdd83dbb56e180856aaf WatchSource:0}: Error finding container 16771258b7fc77b812be2a024d3e8dd2da1dd4978f31bdd83dbb56e180856aaf: Status 404 returned error can't find the container with id 16771258b7fc77b812be2a024d3e8dd2da1dd4978f31bdd83dbb56e180856aaf Oct 06 13:52:47 crc kubenswrapper[4757]: I1006 13:52:47.993167 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-xx877" event={"ID":"f261a0cf-2cfa-452a-9366-430ae1d09fac","Type":"ContainerStarted","Data":"16771258b7fc77b812be2a024d3e8dd2da1dd4978f31bdd83dbb56e180856aaf"} Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.629418 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh"] Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.630748 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.633524 4757 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-j6g7j" Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.636231 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh"] Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.706381 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccqh6\" (UniqueName: \"kubernetes.io/projected/1e270c4f-567e-4a7f-8693-0fc471447e47-kube-api-access-ccqh6\") pod \"cert-manager-cainjector-7d9f95dbf-wz4lh\" (UID: \"1e270c4f-567e-4a7f-8693-0fc471447e47\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.706460 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e270c4f-567e-4a7f-8693-0fc471447e47-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-wz4lh\" (UID: \"1e270c4f-567e-4a7f-8693-0fc471447e47\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.807433 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e270c4f-567e-4a7f-8693-0fc471447e47-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-wz4lh\" (UID: \"1e270c4f-567e-4a7f-8693-0fc471447e47\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.807530 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccqh6\" (UniqueName: \"kubernetes.io/projected/1e270c4f-567e-4a7f-8693-0fc471447e47-kube-api-access-ccqh6\") pod \"cert-manager-cainjector-7d9f95dbf-wz4lh\" (UID: \"1e270c4f-567e-4a7f-8693-0fc471447e47\") " 
pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.839176 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e270c4f-567e-4a7f-8693-0fc471447e47-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-wz4lh\" (UID: \"1e270c4f-567e-4a7f-8693-0fc471447e47\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.839411 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccqh6\" (UniqueName: \"kubernetes.io/projected/1e270c4f-567e-4a7f-8693-0fc471447e47-kube-api-access-ccqh6\") pod \"cert-manager-cainjector-7d9f95dbf-wz4lh\" (UID: \"1e270c4f-567e-4a7f-8693-0fc471447e47\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" Oct 06 13:52:48 crc kubenswrapper[4757]: I1006 13:52:48.997868 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" Oct 06 13:52:49 crc kubenswrapper[4757]: I1006 13:52:49.418597 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh"] Oct 06 13:52:49 crc kubenswrapper[4757]: W1006 13:52:49.429604 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e270c4f_567e_4a7f_8693_0fc471447e47.slice/crio-f241f5b687626a3b4b533c476907eac4fdadc413bc7f6fbe1c04126a3867ce14 WatchSource:0}: Error finding container f241f5b687626a3b4b533c476907eac4fdadc413bc7f6fbe1c04126a3867ce14: Status 404 returned error can't find the container with id f241f5b687626a3b4b533c476907eac4fdadc413bc7f6fbe1c04126a3867ce14 Oct 06 13:52:50 crc kubenswrapper[4757]: I1006 13:52:50.019395 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" event={"ID":"1e270c4f-567e-4a7f-8693-0fc471447e47","Type":"ContainerStarted","Data":"f241f5b687626a3b4b533c476907eac4fdadc413bc7f6fbe1c04126a3867ce14"} Oct 06 13:52:52 crc kubenswrapper[4757]: I1006 13:52:52.030430 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-xx877" event={"ID":"f261a0cf-2cfa-452a-9366-430ae1d09fac","Type":"ContainerStarted","Data":"73b7d1349f8882f9f7283d3854620e0c3d8614df7b9b09d702f5eedf47731b7b"} Oct 06 13:52:52 crc kubenswrapper[4757]: I1006 13:52:52.030767 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:52 crc kubenswrapper[4757]: I1006 13:52:52.031819 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" event={"ID":"1e270c4f-567e-4a7f-8693-0fc471447e47","Type":"ContainerStarted","Data":"8f7a5b1f0acf29d37db79cb7ec739088bc2c9caf616ab72a192860680b8ef6f4"} Oct 06 13:52:52 crc kubenswrapper[4757]: I1006 13:52:52.047644 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-d969966f-xx877" podStartSLOduration=1.9564524479999998 podStartE2EDuration="6.047617787s" podCreationTimestamp="2025-10-06 13:52:46 +0000 UTC" firstStartedPulling="2025-10-06 13:52:47.327369265 +0000 UTC m=+855.824687822" lastFinishedPulling="2025-10-06 13:52:51.418534614 +0000 UTC m=+859.915853161" observedRunningTime="2025-10-06 13:52:52.042746848 +0000 UTC m=+860.540065385" 
watchObservedRunningTime="2025-10-06 13:52:52.047617787 +0000 UTC m=+860.544936324" Oct 06 13:52:56 crc kubenswrapper[4757]: I1006 13:52:56.881804 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-d969966f-xx877" Oct 06 13:52:56 crc kubenswrapper[4757]: I1006 13:52:56.904473 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-wz4lh" podStartSLOduration=6.940605118 podStartE2EDuration="8.904444484s" podCreationTimestamp="2025-10-06 13:52:48 +0000 UTC" firstStartedPulling="2025-10-06 13:52:49.432521919 +0000 UTC m=+857.929840456" lastFinishedPulling="2025-10-06 13:52:51.396361285 +0000 UTC m=+859.893679822" observedRunningTime="2025-10-06 13:52:52.062301457 +0000 UTC m=+860.559619994" watchObservedRunningTime="2025-10-06 13:52:56.904444484 +0000 UTC m=+865.401763061" Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.323072 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-wl5f4"] Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.323804 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.328641 4757 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-rjkfl" Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.334276 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-wl5f4"] Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.431918 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-wl5f4\" (UID: \"88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528\") " pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.432590 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7hsf\" (UniqueName: \"kubernetes.io/projected/88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528-kube-api-access-j7hsf\") pod \"cert-manager-7d4cc89fcb-wl5f4\" (UID: \"88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528\") " pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.533428 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-wl5f4\" (UID: \"88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528\") " pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.533508 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7hsf\" (UniqueName: \"kubernetes.io/projected/88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528-kube-api-access-j7hsf\") pod \"cert-manager-7d4cc89fcb-wl5f4\" (UID: \"88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528\") " pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.551864 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-wl5f4\" (UID: 
\"88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528\") " pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.554788 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7hsf\" (UniqueName: \"kubernetes.io/projected/88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528-kube-api-access-j7hsf\") pod \"cert-manager-7d4cc89fcb-wl5f4\" (UID: \"88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528\") " pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" Oct 06 13:52:57 crc kubenswrapper[4757]: I1006 13:52:57.642787 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" Oct 06 13:52:58 crc kubenswrapper[4757]: I1006 13:52:58.068637 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-wl5f4"] Oct 06 13:52:59 crc kubenswrapper[4757]: I1006 13:52:59.076356 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" event={"ID":"88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528","Type":"ContainerStarted","Data":"cb5a36a86d8b51a157da29a969cdca8aa8922fcb96033f7ffeb3bd1745f5c33d"} Oct 06 13:52:59 crc kubenswrapper[4757]: I1006 13:52:59.076761 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" event={"ID":"88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528","Type":"ContainerStarted","Data":"83ddbf79bdbe37f933341acd4888a4ad99a8d9e2f4c9b6dca7b04b3a1d4b6ed7"} Oct 06 13:52:59 crc kubenswrapper[4757]: I1006 13:52:59.094692 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-7d4cc89fcb-wl5f4" podStartSLOduration=2.094671102 podStartE2EDuration="2.094671102s" podCreationTimestamp="2025-10-06 13:52:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:52:59.091605458 +0000 UTC m=+867.588923995" watchObservedRunningTime="2025-10-06 13:52:59.094671102 +0000 UTC m=+867.591989659" Oct 06 13:53:10 crc kubenswrapper[4757]: I1006 13:53:10.894223 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-t4k98"] Oct 06 13:53:10 crc kubenswrapper[4757]: I1006 13:53:10.895766 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-t4k98" Oct 06 13:53:10 crc kubenswrapper[4757]: I1006 13:53:10.902664 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 06 13:53:10 crc kubenswrapper[4757]: I1006 13:53:10.903194 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-rqh2v" Oct 06 13:53:10 crc kubenswrapper[4757]: I1006 13:53:10.903229 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 06 13:53:10 crc kubenswrapper[4757]: I1006 13:53:10.952254 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-t4k98"] Oct 06 13:53:10 crc kubenswrapper[4757]: I1006 13:53:10.973539 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4khw\" (UniqueName: \"kubernetes.io/projected/11cf9e92-637c-449d-9398-db72e27550c2-kube-api-access-n4khw\") pod \"openstack-operator-index-t4k98\" (UID: \"11cf9e92-637c-449d-9398-db72e27550c2\") " pod="openstack-operators/openstack-operator-index-t4k98" Oct 06 13:53:11 crc kubenswrapper[4757]: I1006 13:53:11.075184 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4khw\" (UniqueName: \"kubernetes.io/projected/11cf9e92-637c-449d-9398-db72e27550c2-kube-api-access-n4khw\") pod \"openstack-operator-index-t4k98\" (UID: \"11cf9e92-637c-449d-9398-db72e27550c2\") " pod="openstack-operators/openstack-operator-index-t4k98" Oct 06 13:53:11 crc kubenswrapper[4757]: I1006 13:53:11.094787 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4khw\" (UniqueName: \"kubernetes.io/projected/11cf9e92-637c-449d-9398-db72e27550c2-kube-api-access-n4khw\") pod \"openstack-operator-index-t4k98\" (UID: \"11cf9e92-637c-449d-9398-db72e27550c2\") " pod="openstack-operators/openstack-operator-index-t4k98" Oct 06 13:53:11 crc kubenswrapper[4757]: I1006 13:53:11.220269 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-t4k98" Oct 06 13:53:11 crc kubenswrapper[4757]: I1006 13:53:11.512050 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-t4k98"] Oct 06 13:53:12 crc kubenswrapper[4757]: I1006 13:53:12.176873 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-t4k98" event={"ID":"11cf9e92-637c-449d-9398-db72e27550c2","Type":"ContainerStarted","Data":"1e68359f6b536848434b3c58c067a478dbc92807167140d02344aec6271a5fd9"} Oct 06 13:53:13 crc kubenswrapper[4757]: I1006 13:53:13.199365 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-t4k98" event={"ID":"11cf9e92-637c-449d-9398-db72e27550c2","Type":"ContainerStarted","Data":"7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8"} Oct 06 13:53:13 crc kubenswrapper[4757]: I1006 13:53:13.228618 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-t4k98" podStartSLOduration=2.061801131 podStartE2EDuration="3.228592681s" podCreationTimestamp="2025-10-06 13:53:10 +0000 UTC" firstStartedPulling="2025-10-06 13:53:11.521135415 +0000 UTC m=+880.018453992" lastFinishedPulling="2025-10-06 13:53:12.687927005 +0000 UTC m=+881.185245542" observedRunningTime="2025-10-06 13:53:13.220900674 +0000 UTC m=+881.718219241" watchObservedRunningTime="2025-10-06 13:53:13.228592681 +0000 UTC m=+881.725911228" Oct 06 13:53:14 crc kubenswrapper[4757]: I1006 13:53:14.270192 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-t4k98"] Oct 06 13:53:14 crc kubenswrapper[4757]: I1006 13:53:14.875928 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-ddqvx"] Oct 06 13:53:14 crc kubenswrapper[4757]: I1006 13:53:14.876812 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ddqvx" Oct 06 13:53:14 crc kubenswrapper[4757]: I1006 13:53:14.891006 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ddqvx"] Oct 06 13:53:14 crc kubenswrapper[4757]: I1006 13:53:14.929005 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2w25\" (UniqueName: \"kubernetes.io/projected/7164f700-e389-47c2-b3a7-2e4afe8ebc36-kube-api-access-r2w25\") pod \"openstack-operator-index-ddqvx\" (UID: \"7164f700-e389-47c2-b3a7-2e4afe8ebc36\") " pod="openstack-operators/openstack-operator-index-ddqvx" Oct 06 13:53:15 crc kubenswrapper[4757]: I1006 13:53:15.029987 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2w25\" (UniqueName: \"kubernetes.io/projected/7164f700-e389-47c2-b3a7-2e4afe8ebc36-kube-api-access-r2w25\") pod \"openstack-operator-index-ddqvx\" (UID: \"7164f700-e389-47c2-b3a7-2e4afe8ebc36\") " pod="openstack-operators/openstack-operator-index-ddqvx" Oct 06 13:53:15 crc kubenswrapper[4757]: I1006 13:53:15.061644 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2w25\" (UniqueName: \"kubernetes.io/projected/7164f700-e389-47c2-b3a7-2e4afe8ebc36-kube-api-access-r2w25\") pod \"openstack-operator-index-ddqvx\" (UID: \"7164f700-e389-47c2-b3a7-2e4afe8ebc36\") " pod="openstack-operators/openstack-operator-index-ddqvx" Oct 06 13:53:15 crc kubenswrapper[4757]: I1006 13:53:15.213554 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-t4k98" podUID="11cf9e92-637c-449d-9398-db72e27550c2" containerName="registry-server" containerID="cri-o://7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8" gracePeriod=2 Oct 06 13:53:15 crc kubenswrapper[4757]: I1006 13:53:15.223451 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ddqvx" Oct 06 13:53:15 crc kubenswrapper[4757]: I1006 13:53:15.459109 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ddqvx"] Oct 06 13:53:15 crc kubenswrapper[4757]: W1006 13:53:15.467645 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7164f700_e389_47c2_b3a7_2e4afe8ebc36.slice/crio-dffb38195758227b5d9111fade8a9aaceeb7f8c7ecd04966b43566768a1b7ac6 WatchSource:0}: Error finding container dffb38195758227b5d9111fade8a9aaceeb7f8c7ecd04966b43566768a1b7ac6: Status 404 returned error can't find the container with id dffb38195758227b5d9111fade8a9aaceeb7f8c7ecd04966b43566768a1b7ac6 Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.016938 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-t4k98" Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.146490 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4khw\" (UniqueName: \"kubernetes.io/projected/11cf9e92-637c-449d-9398-db72e27550c2-kube-api-access-n4khw\") pod \"11cf9e92-637c-449d-9398-db72e27550c2\" (UID: \"11cf9e92-637c-449d-9398-db72e27550c2\") " Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.153501 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11cf9e92-637c-449d-9398-db72e27550c2-kube-api-access-n4khw" (OuterVolumeSpecName: "kube-api-access-n4khw") pod "11cf9e92-637c-449d-9398-db72e27550c2" (UID: "11cf9e92-637c-449d-9398-db72e27550c2"). InnerVolumeSpecName "kube-api-access-n4khw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.218728 4757 generic.go:334] "Generic (PLEG): container finished" podID="11cf9e92-637c-449d-9398-db72e27550c2" containerID="7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8" exitCode=0 Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.218793 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-t4k98" Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.218785 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-t4k98" event={"ID":"11cf9e92-637c-449d-9398-db72e27550c2","Type":"ContainerDied","Data":"7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8"} Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.218866 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-t4k98" event={"ID":"11cf9e92-637c-449d-9398-db72e27550c2","Type":"ContainerDied","Data":"1e68359f6b536848434b3c58c067a478dbc92807167140d02344aec6271a5fd9"} Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.218889 4757 scope.go:117] "RemoveContainer" containerID="7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8" Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.219944 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ddqvx" event={"ID":"7164f700-e389-47c2-b3a7-2e4afe8ebc36","Type":"ContainerStarted","Data":"dffb38195758227b5d9111fade8a9aaceeb7f8c7ecd04966b43566768a1b7ac6"} Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.237270 4757 scope.go:117] "RemoveContainer" containerID="7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8" Oct 06 13:53:16 crc kubenswrapper[4757]: E1006 13:53:16.240975 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8\": container with ID starting with 7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8 not found: ID does not exist" containerID="7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8" Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.241019 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8"} err="failed to get container status \"7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8\": rpc error: code = NotFound desc = could not find 
container \"7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8\": container with ID starting with 7a54341c5b7f57a5f6103fd5f08db9826768b2353ac7e62846e84c9951dbb2e8 not found: ID does not exist" Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.244385 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-t4k98"] Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.247968 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4khw\" (UniqueName: \"kubernetes.io/projected/11cf9e92-637c-449d-9398-db72e27550c2-kube-api-access-n4khw\") on node \"crc\" DevicePath \"\"" Oct 06 13:53:16 crc kubenswrapper[4757]: I1006 13:53:16.248442 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-t4k98"] Oct 06 13:53:17 crc kubenswrapper[4757]: I1006 13:53:17.231078 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ddqvx" event={"ID":"7164f700-e389-47c2-b3a7-2e4afe8ebc36","Type":"ContainerStarted","Data":"ab9c5824540b0588e32d899633d211ced2427a05f6d460cd06336f22c50540a3"} Oct 06 13:53:17 crc kubenswrapper[4757]: I1006 13:53:17.259520 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-ddqvx" podStartSLOduration=2.626624896 podStartE2EDuration="3.259484594s" podCreationTimestamp="2025-10-06 13:53:14 +0000 UTC" firstStartedPulling="2025-10-06 13:53:15.471496232 +0000 UTC m=+883.968814769" lastFinishedPulling="2025-10-06 13:53:16.10435593 +0000 UTC m=+884.601674467" observedRunningTime="2025-10-06 13:53:17.250745396 +0000 UTC m=+885.748063973" watchObservedRunningTime="2025-10-06 13:53:17.259484594 +0000 UTC m=+885.756803181" Oct 06 13:53:18 crc kubenswrapper[4757]: I1006 13:53:18.188429 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11cf9e92-637c-449d-9398-db72e27550c2" path="/var/lib/kubelet/pods/11cf9e92-637c-449d-9398-db72e27550c2/volumes" Oct 06 13:53:25 crc kubenswrapper[4757]: I1006 13:53:25.224997 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-ddqvx" Oct 06 13:53:25 crc kubenswrapper[4757]: I1006 13:53:25.225593 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-ddqvx" Oct 06 13:53:25 crc kubenswrapper[4757]: I1006 13:53:25.258321 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-ddqvx" Oct 06 13:53:25 crc kubenswrapper[4757]: I1006 13:53:25.317880 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-ddqvx" Oct 06 13:53:32 crc kubenswrapper[4757]: I1006 13:53:32.878967 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8"] Oct 06 13:53:32 crc kubenswrapper[4757]: E1006 13:53:32.880330 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cf9e92-637c-449d-9398-db72e27550c2" containerName="registry-server" Oct 06 13:53:32 crc kubenswrapper[4757]: I1006 13:53:32.880354 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cf9e92-637c-449d-9398-db72e27550c2" containerName="registry-server" Oct 06 13:53:32 crc kubenswrapper[4757]: I1006 13:53:32.880611 4757 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="11cf9e92-637c-449d-9398-db72e27550c2" containerName="registry-server" Oct 06 13:53:32 crc kubenswrapper[4757]: I1006 13:53:32.882400 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:32 crc kubenswrapper[4757]: I1006 13:53:32.885001 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-pjh8z" Oct 06 13:53:32 crc kubenswrapper[4757]: I1006 13:53:32.892387 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8"] Oct 06 13:53:32 crc kubenswrapper[4757]: I1006 13:53:32.903749 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wjvp\" (UniqueName: \"kubernetes.io/projected/90ed9755-badd-4897-be59-4ffb24a37b83-kube-api-access-8wjvp\") pod \"15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:32 crc kubenswrapper[4757]: I1006 13:53:32.903835 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-util\") pod \"15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:32 crc kubenswrapper[4757]: I1006 13:53:32.904036 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-bundle\") pod \"15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:33 crc kubenswrapper[4757]: I1006 13:53:33.004873 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-bundle\") pod \"15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:33 crc kubenswrapper[4757]: I1006 13:53:33.004982 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wjvp\" (UniqueName: \"kubernetes.io/projected/90ed9755-badd-4897-be59-4ffb24a37b83-kube-api-access-8wjvp\") pod \"15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:33 crc kubenswrapper[4757]: I1006 13:53:33.005016 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-util\") pod \"15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:33 crc 
kubenswrapper[4757]: I1006 13:53:33.005768 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-bundle\") pod \"15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:33 crc kubenswrapper[4757]: I1006 13:53:33.005826 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-util\") pod \"15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:33 crc kubenswrapper[4757]: I1006 13:53:33.030234 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wjvp\" (UniqueName: \"kubernetes.io/projected/90ed9755-badd-4897-be59-4ffb24a37b83-kube-api-access-8wjvp\") pod \"15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:33 crc kubenswrapper[4757]: I1006 13:53:33.216253 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:33 crc kubenswrapper[4757]: I1006 13:53:33.473239 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8"] Oct 06 13:53:33 crc kubenswrapper[4757]: W1006 13:53:33.477402 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90ed9755_badd_4897_be59_4ffb24a37b83.slice/crio-2e0d25b264517ff8b39a9363d0d986a338b3f50237c5b93286a66d431a92bacd WatchSource:0}: Error finding container 2e0d25b264517ff8b39a9363d0d986a338b3f50237c5b93286a66d431a92bacd: Status 404 returned error can't find the container with id 2e0d25b264517ff8b39a9363d0d986a338b3f50237c5b93286a66d431a92bacd Oct 06 13:53:34 crc kubenswrapper[4757]: I1006 13:53:34.361071 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:53:34 crc kubenswrapper[4757]: I1006 13:53:34.361177 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:53:34 crc kubenswrapper[4757]: I1006 13:53:34.362962 4757 generic.go:334] "Generic (PLEG): container finished" podID="90ed9755-badd-4897-be59-4ffb24a37b83" containerID="53615929d61b009aa57712b3a892d0d8e6e0ce63374b681736980df751367b8c" exitCode=0 Oct 06 13:53:34 crc kubenswrapper[4757]: I1006 13:53:34.363113 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" 
event={"ID":"90ed9755-badd-4897-be59-4ffb24a37b83","Type":"ContainerDied","Data":"53615929d61b009aa57712b3a892d0d8e6e0ce63374b681736980df751367b8c"} Oct 06 13:53:34 crc kubenswrapper[4757]: I1006 13:53:34.363188 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" event={"ID":"90ed9755-badd-4897-be59-4ffb24a37b83","Type":"ContainerStarted","Data":"2e0d25b264517ff8b39a9363d0d986a338b3f50237c5b93286a66d431a92bacd"} Oct 06 13:53:35 crc kubenswrapper[4757]: I1006 13:53:35.370438 4757 generic.go:334] "Generic (PLEG): container finished" podID="90ed9755-badd-4897-be59-4ffb24a37b83" containerID="376d0ccea553e23727873b9d5c5b936f6a2e23a480f13c8f4183ae69b10e1a56" exitCode=0 Oct 06 13:53:35 crc kubenswrapper[4757]: I1006 13:53:35.370642 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" event={"ID":"90ed9755-badd-4897-be59-4ffb24a37b83","Type":"ContainerDied","Data":"376d0ccea553e23727873b9d5c5b936f6a2e23a480f13c8f4183ae69b10e1a56"} Oct 06 13:53:36 crc kubenswrapper[4757]: I1006 13:53:36.381829 4757 generic.go:334] "Generic (PLEG): container finished" podID="90ed9755-badd-4897-be59-4ffb24a37b83" containerID="e0f82a8dc36ab3397e7dc8f7740f86e377871343b1baa1dc100e609c6a979b3f" exitCode=0 Oct 06 13:53:36 crc kubenswrapper[4757]: I1006 13:53:36.381911 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" event={"ID":"90ed9755-badd-4897-be59-4ffb24a37b83","Type":"ContainerDied","Data":"e0f82a8dc36ab3397e7dc8f7740f86e377871343b1baa1dc100e609c6a979b3f"} Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.687290 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.778939 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wjvp\" (UniqueName: \"kubernetes.io/projected/90ed9755-badd-4897-be59-4ffb24a37b83-kube-api-access-8wjvp\") pod \"90ed9755-badd-4897-be59-4ffb24a37b83\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.779055 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-util\") pod \"90ed9755-badd-4897-be59-4ffb24a37b83\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.779133 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-bundle\") pod \"90ed9755-badd-4897-be59-4ffb24a37b83\" (UID: \"90ed9755-badd-4897-be59-4ffb24a37b83\") " Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.780583 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-bundle" (OuterVolumeSpecName: "bundle") pod "90ed9755-badd-4897-be59-4ffb24a37b83" (UID: "90ed9755-badd-4897-be59-4ffb24a37b83"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.788123 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90ed9755-badd-4897-be59-4ffb24a37b83-kube-api-access-8wjvp" (OuterVolumeSpecName: "kube-api-access-8wjvp") pod "90ed9755-badd-4897-be59-4ffb24a37b83" (UID: "90ed9755-badd-4897-be59-4ffb24a37b83"). InnerVolumeSpecName "kube-api-access-8wjvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.812297 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-util" (OuterVolumeSpecName: "util") pod "90ed9755-badd-4897-be59-4ffb24a37b83" (UID: "90ed9755-badd-4897-be59-4ffb24a37b83"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.880528 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wjvp\" (UniqueName: \"kubernetes.io/projected/90ed9755-badd-4897-be59-4ffb24a37b83-kube-api-access-8wjvp\") on node \"crc\" DevicePath \"\"" Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.880761 4757 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-util\") on node \"crc\" DevicePath \"\"" Oct 06 13:53:37 crc kubenswrapper[4757]: I1006 13:53:37.880902 4757 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/90ed9755-badd-4897-be59-4ffb24a37b83-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:53:38 crc kubenswrapper[4757]: I1006 13:53:38.397063 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" event={"ID":"90ed9755-badd-4897-be59-4ffb24a37b83","Type":"ContainerDied","Data":"2e0d25b264517ff8b39a9363d0d986a338b3f50237c5b93286a66d431a92bacd"} Oct 06 13:53:38 crc kubenswrapper[4757]: I1006 13:53:38.397169 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e0d25b264517ff8b39a9363d0d986a338b3f50237c5b93286a66d431a92bacd" Oct 06 13:53:38 crc kubenswrapper[4757]: I1006 13:53:38.397192 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.498491 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq"] Oct 06 13:53:45 crc kubenswrapper[4757]: E1006 13:53:45.499314 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90ed9755-badd-4897-be59-4ffb24a37b83" containerName="extract" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.499326 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="90ed9755-badd-4897-be59-4ffb24a37b83" containerName="extract" Oct 06 13:53:45 crc kubenswrapper[4757]: E1006 13:53:45.499345 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90ed9755-badd-4897-be59-4ffb24a37b83" containerName="pull" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.499351 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="90ed9755-badd-4897-be59-4ffb24a37b83" containerName="pull" Oct 06 13:53:45 crc kubenswrapper[4757]: E1006 13:53:45.499359 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90ed9755-badd-4897-be59-4ffb24a37b83" containerName="util" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.499364 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="90ed9755-badd-4897-be59-4ffb24a37b83" containerName="util" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.499463 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="90ed9755-badd-4897-be59-4ffb24a37b83" containerName="extract" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.500054 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.501613 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-2wjt2" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.540529 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq"] Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.688896 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkg76\" (UniqueName: \"kubernetes.io/projected/8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8-kube-api-access-mkg76\") pod \"openstack-operator-controller-operator-6975b4f4b9-44lxq\" (UID: \"8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8\") " pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.790526 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkg76\" (UniqueName: \"kubernetes.io/projected/8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8-kube-api-access-mkg76\") pod \"openstack-operator-controller-operator-6975b4f4b9-44lxq\" (UID: \"8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8\") " pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.812785 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkg76\" (UniqueName: \"kubernetes.io/projected/8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8-kube-api-access-mkg76\") pod \"openstack-operator-controller-operator-6975b4f4b9-44lxq\" 
(UID: \"8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8\") " pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" Oct 06 13:53:45 crc kubenswrapper[4757]: I1006 13:53:45.817420 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" Oct 06 13:53:46 crc kubenswrapper[4757]: I1006 13:53:46.252243 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq"] Oct 06 13:53:46 crc kubenswrapper[4757]: I1006 13:53:46.449618 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" event={"ID":"8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8","Type":"ContainerStarted","Data":"4ca647522504f4daa819924d02ae0d10e0524088cde97071b3f2020e90c70596"} Oct 06 13:53:50 crc kubenswrapper[4757]: I1006 13:53:50.476078 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" event={"ID":"8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8","Type":"ContainerStarted","Data":"a868bf93296267dba1fa4572201765631c0993cbc36f9a47ff6794d7b4915d26"} Oct 06 13:53:52 crc kubenswrapper[4757]: I1006 13:53:52.512664 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" event={"ID":"8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8","Type":"ContainerStarted","Data":"80895c6b0c351aaa0db87517a74a456a3ae50a1d64d4e72c6ace481a5bca2a8c"} Oct 06 13:53:52 crc kubenswrapper[4757]: I1006 13:53:52.513132 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" Oct 06 13:53:52 crc kubenswrapper[4757]: I1006 13:53:52.574292 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" podStartSLOduration=1.770837711 podStartE2EDuration="7.574256897s" podCreationTimestamp="2025-10-06 13:53:45 +0000 UTC" firstStartedPulling="2025-10-06 13:53:46.262854166 +0000 UTC m=+914.760172703" lastFinishedPulling="2025-10-06 13:53:52.066273332 +0000 UTC m=+920.563591889" observedRunningTime="2025-10-06 13:53:52.566266334 +0000 UTC m=+921.063584921" watchObservedRunningTime="2025-10-06 13:53:52.574256897 +0000 UTC m=+921.071575514" Oct 06 13:53:55 crc kubenswrapper[4757]: I1006 13:53:55.821828 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6975b4f4b9-44lxq" Oct 06 13:54:04 crc kubenswrapper[4757]: I1006 13:54:04.361620 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:54:04 crc kubenswrapper[4757]: I1006 13:54:04.362464 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:54:29 crc kubenswrapper[4757]: I1006 13:54:29.966490 4757 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj"] Oct 06 13:54:29 crc kubenswrapper[4757]: I1006 13:54:29.968885 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" Oct 06 13:54:29 crc kubenswrapper[4757]: I1006 13:54:29.971995 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-wjn85" Oct 06 13:54:29 crc kubenswrapper[4757]: I1006 13:54:29.976582 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj"] Oct 06 13:54:29 crc kubenswrapper[4757]: I1006 13:54:29.981483 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5"] Oct 06 13:54:29 crc kubenswrapper[4757]: I1006 13:54:29.983478 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" Oct 06 13:54:29 crc kubenswrapper[4757]: I1006 13:54:29.985725 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-zrxzk" Oct 06 13:54:29 crc kubenswrapper[4757]: I1006 13:54:29.994007 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w"] Oct 06 13:54:29 crc kubenswrapper[4757]: I1006 13:54:29.995148 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.001602 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-tdjcq" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.010079 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.024350 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.035696 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.036641 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.038572 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-v9s4d" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.041874 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzdcd\" (UniqueName: \"kubernetes.io/projected/b539d43b-476d-46dc-944b-47ba64f84566-kube-api-access-tzdcd\") pod \"cinder-operator-controller-manager-84bd8f6848-cm2d5\" (UID: \"b539d43b-476d-46dc-944b-47ba64f84566\") " pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.042128 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwqc4\" (UniqueName: \"kubernetes.io/projected/e6876774-af7e-4e5e-bed0-db5f2ac668b9-kube-api-access-pwqc4\") pod \"designate-operator-controller-manager-58d86cd59d-8dq6w\" (UID: \"e6876774-af7e-4e5e-bed0-db5f2ac668b9\") " pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.042197 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pccfj\" (UniqueName: \"kubernetes.io/projected/44a67ef5-395a-4ba1-8572-847e4bc6e4a1-kube-api-access-pccfj\") pod \"barbican-operator-controller-manager-64f56ff694-bwzkj\" (UID: \"44a67ef5-395a-4ba1-8572-847e4bc6e4a1\") " pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.046169 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.066908 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.068167 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.071072 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.072017 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.076860 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.078920 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-j9jlg" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.079182 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-p7jt5" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.088976 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.102601 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.103635 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.105768 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-jt9rh" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.105986 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.151207 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwqc4\" (UniqueName: \"kubernetes.io/projected/e6876774-af7e-4e5e-bed0-db5f2ac668b9-kube-api-access-pwqc4\") pod \"designate-operator-controller-manager-58d86cd59d-8dq6w\" (UID: \"e6876774-af7e-4e5e-bed0-db5f2ac668b9\") " pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.151361 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhqpn\" (UniqueName: \"kubernetes.io/projected/28e3257e-3a28-4a92-8658-936b70dd1b79-kube-api-access-vhqpn\") pod \"horizon-operator-controller-manager-5b477879bc-7z4cc\" (UID: \"28e3257e-3a28-4a92-8658-936b70dd1b79\") " pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.151482 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pccfj\" (UniqueName: \"kubernetes.io/projected/44a67ef5-395a-4ba1-8572-847e4bc6e4a1-kube-api-access-pccfj\") pod \"barbican-operator-controller-manager-64f56ff694-bwzkj\" (UID: \"44a67ef5-395a-4ba1-8572-847e4bc6e4a1\") " pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.151564 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ec5d9f24-e11c-4966-b85d-cfb960db9568-cert\") pod \"infra-operator-controller-manager-84788b6bc5-ghbgp\" (UID: \"ec5d9f24-e11c-4966-b85d-cfb960db9568\") " pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" 
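The burst of reconciler_common.go entries above is the kubelet's two-phase volume handling for the newly added operator pods: VerifyControllerAttachedVolume confirms each declared volume against the desired state of the world, MountVolume performs the SetUp, and "MountVolume.SetUp succeeded" follows once the projected service-account token or Secret is materialized on disk. A minimal sketch of the pod-side declaration that drives entries like these, written against the Kubernetes API types (k8s.io/api); the image and mount path are assumptions, not taken from the log:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// Sketch of a pod that yields the volume entries seen above: the
	// "cert" volume must name an existing Secret, otherwise
	// MountVolume.SetUp fails as logged a few entries below.
	pod := corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "infra-operator-controller-manager",
			Namespace: "openstack-operators",
		},
		Spec: corev1.PodSpec{
			Containers: []corev1.Container{{
				Name:  "manager",
				Image: "example.invalid/infra-operator:latest", // hypothetical image
				VolumeMounts: []corev1.VolumeMount{{
					Name:      "cert",
					MountPath: "/tmp/k8s-webhook-server/serving-certs", // conventional webhook path; an assumption
					ReadOnly:  true,
				}},
			}},
			Volumes: []corev1.Volume{{
				Name: "cert",
				VolumeSource: corev1.VolumeSource{
					Secret: &corev1.SecretVolumeSource{
						SecretName: "infra-operator-webhook-server-cert",
					},
				},
			}},
		},
	}
	fmt.Printf("pod %s/%s mounts secret %q\n",
		pod.Namespace, pod.Name,
		pod.Spec.Volumes[0].VolumeSource.Secret.SecretName)
}

The kube-api-access-* volumes in the log are the projected service-account token volumes injected automatically for each pod; only the cert volume is declared explicitly by the operator's workload.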
Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.151665 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzdcd\" (UniqueName: \"kubernetes.io/projected/b539d43b-476d-46dc-944b-47ba64f84566-kube-api-access-tzdcd\") pod \"cinder-operator-controller-manager-84bd8f6848-cm2d5\" (UID: \"b539d43b-476d-46dc-944b-47ba64f84566\") " pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.151690 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb8k7\" (UniqueName: \"kubernetes.io/projected/ec5d9f24-e11c-4966-b85d-cfb960db9568-kube-api-access-fb8k7\") pod \"infra-operator-controller-manager-84788b6bc5-ghbgp\" (UID: \"ec5d9f24-e11c-4966-b85d-cfb960db9568\") " pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.151789 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6xhr\" (UniqueName: \"kubernetes.io/projected/5f9c96a5-1d49-48a6-89d4-56fdea632598-kube-api-access-n6xhr\") pod \"heat-operator-controller-manager-7ccfc8cf49-np2qg\" (UID: \"5f9c96a5-1d49-48a6-89d4-56fdea632598\") " pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.151933 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkpnd\" (UniqueName: \"kubernetes.io/projected/deeb9237-8930-41f6-94e5-bd012cca4f95-kube-api-access-vkpnd\") pod \"glance-operator-controller-manager-fd648f65-rbrjz\" (UID: \"deeb9237-8930-41f6-94e5-bd012cca4f95\") " pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.203191 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzdcd\" (UniqueName: \"kubernetes.io/projected/b539d43b-476d-46dc-944b-47ba64f84566-kube-api-access-tzdcd\") pod \"cinder-operator-controller-manager-84bd8f6848-cm2d5\" (UID: \"b539d43b-476d-46dc-944b-47ba64f84566\") " pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.210598 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwqc4\" (UniqueName: \"kubernetes.io/projected/e6876774-af7e-4e5e-bed0-db5f2ac668b9-kube-api-access-pwqc4\") pod \"designate-operator-controller-manager-58d86cd59d-8dq6w\" (UID: \"e6876774-af7e-4e5e-bed0-db5f2ac668b9\") " pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.212128 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pccfj\" (UniqueName: \"kubernetes.io/projected/44a67ef5-395a-4ba1-8572-847e4bc6e4a1-kube-api-access-pccfj\") pod \"barbican-operator-controller-manager-64f56ff694-bwzkj\" (UID: \"44a67ef5-395a-4ba1-8572-847e4bc6e4a1\") " pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.237705 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.245872 4757 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.245922 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.247505 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.248439 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.254923 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-jnznf" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.255581 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.256484 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqcqd\" (UniqueName: \"kubernetes.io/projected/904d882e-dbf9-4f3c-897c-6cacf5a38057-kube-api-access-gqcqd\") pod \"ironic-operator-controller-manager-5467f8988c-w9lhz\" (UID: \"904d882e-dbf9-4f3c-897c-6cacf5a38057\") " pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.256545 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhqpn\" (UniqueName: \"kubernetes.io/projected/28e3257e-3a28-4a92-8658-936b70dd1b79-kube-api-access-vhqpn\") pod \"horizon-operator-controller-manager-5b477879bc-7z4cc\" (UID: \"28e3257e-3a28-4a92-8658-936b70dd1b79\") " pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.256590 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqd4w\" (UniqueName: \"kubernetes.io/projected/370a8b4d-2c86-4920-91c9-19834ea66e0e-kube-api-access-lqd4w\") pod \"keystone-operator-controller-manager-5b84cc7657-2lh74\" (UID: \"370a8b4d-2c86-4920-91c9-19834ea66e0e\") " pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.256652 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ec5d9f24-e11c-4966-b85d-cfb960db9568-cert\") pod \"infra-operator-controller-manager-84788b6bc5-ghbgp\" (UID: \"ec5d9f24-e11c-4966-b85d-cfb960db9568\") " pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.256705 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb8k7\" (UniqueName: \"kubernetes.io/projected/ec5d9f24-e11c-4966-b85d-cfb960db9568-kube-api-access-fb8k7\") pod \"infra-operator-controller-manager-84788b6bc5-ghbgp\" (UID: \"ec5d9f24-e11c-4966-b85d-cfb960db9568\") " pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.256760 4757 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-n6xhr\" (UniqueName: \"kubernetes.io/projected/5f9c96a5-1d49-48a6-89d4-56fdea632598-kube-api-access-n6xhr\") pod \"heat-operator-controller-manager-7ccfc8cf49-np2qg\" (UID: \"5f9c96a5-1d49-48a6-89d4-56fdea632598\") " pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.256794 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkpnd\" (UniqueName: \"kubernetes.io/projected/deeb9237-8930-41f6-94e5-bd012cca4f95-kube-api-access-vkpnd\") pod \"glance-operator-controller-manager-fd648f65-rbrjz\" (UID: \"deeb9237-8930-41f6-94e5-bd012cca4f95\") " pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" Oct 06 13:54:30 crc kubenswrapper[4757]: E1006 13:54:30.257029 4757 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 06 13:54:30 crc kubenswrapper[4757]: E1006 13:54:30.257087 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ec5d9f24-e11c-4966-b85d-cfb960db9568-cert podName:ec5d9f24-e11c-4966-b85d-cfb960db9568 nodeName:}" failed. No retries permitted until 2025-10-06 13:54:30.757067731 +0000 UTC m=+959.254386268 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ec5d9f24-e11c-4966-b85d-cfb960db9568-cert") pod "infra-operator-controller-manager-84788b6bc5-ghbgp" (UID: "ec5d9f24-e11c-4966-b85d-cfb960db9568") : secret "infra-operator-webhook-server-cert" not found Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.258616 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.267515 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-xpqrj" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.279754 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.281919 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.283919 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.288236 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-2lnfj" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.291010 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkpnd\" (UniqueName: \"kubernetes.io/projected/deeb9237-8930-41f6-94e5-bd012cca4f95-kube-api-access-vkpnd\") pod \"glance-operator-controller-manager-fd648f65-rbrjz\" (UID: \"deeb9237-8930-41f6-94e5-bd012cca4f95\") " pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.291076 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.292373 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.294760 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.295736 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.296979 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.304498 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.304860 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-rxxbd" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.305144 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-fb2z2" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.305910 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.306080 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.308781 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.310688 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.311791 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.315861 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6xhr\" (UniqueName: \"kubernetes.io/projected/5f9c96a5-1d49-48a6-89d4-56fdea632598-kube-api-access-n6xhr\") pod \"heat-operator-controller-manager-7ccfc8cf49-np2qg\" (UID: \"5f9c96a5-1d49-48a6-89d4-56fdea632598\") " pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.321748 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-cq47f" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.323116 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhqpn\" (UniqueName: \"kubernetes.io/projected/28e3257e-3a28-4a92-8658-936b70dd1b79-kube-api-access-vhqpn\") pod \"horizon-operator-controller-manager-5b477879bc-7z4cc\" (UID: \"28e3257e-3a28-4a92-8658-936b70dd1b79\") " pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.323163 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.326042 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.340879 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.340916 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.342509 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.354806 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.355317 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-lrlck" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.364205 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.367318 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.368014 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqcqd\" (UniqueName: \"kubernetes.io/projected/904d882e-dbf9-4f3c-897c-6cacf5a38057-kube-api-access-gqcqd\") pod \"ironic-operator-controller-manager-5467f8988c-w9lhz\" (UID: \"904d882e-dbf9-4f3c-897c-6cacf5a38057\") " pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.368048 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92bsb\" (UniqueName: \"kubernetes.io/projected/ef26c912-a862-423b-8e2f-2ad84c392e16-kube-api-access-92bsb\") pod \"neutron-operator-controller-manager-69b956fbf6-49zhk\" (UID: \"ef26c912-a862-423b-8e2f-2ad84c392e16\") " pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.368068 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5t5m\" (UniqueName: \"kubernetes.io/projected/9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc-kube-api-access-k5t5m\") pod \"mariadb-operator-controller-manager-d6c9dc5bc-tsk8p\" (UID: \"9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc\") " pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.368085 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l62fj\" (UniqueName: \"kubernetes.io/projected/52d15369-93d1-4240-9c8a-22fc91e31b2b-kube-api-access-l62fj\") pod \"nova-operator-controller-manager-6c9b57c67-qqbjc\" (UID: \"52d15369-93d1-4240-9c8a-22fc91e31b2b\") " pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.368118 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgk6k\" (UniqueName: \"kubernetes.io/projected/0c353b7c-52c9-490a-ab96-7d47a1dae189-kube-api-access-jgk6k\") pod \"manila-operator-controller-manager-7cb48dbc-k7npw\" (UID: \"0c353b7c-52c9-490a-ab96-7d47a1dae189\") " pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.368139 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqd4w\" (UniqueName: \"kubernetes.io/projected/370a8b4d-2c86-4920-91c9-19834ea66e0e-kube-api-access-lqd4w\") pod \"keystone-operator-controller-manager-5b84cc7657-2lh74\" (UID: \"370a8b4d-2c86-4920-91c9-19834ea66e0e\") " pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.368186 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6db6\" (UniqueName: \"kubernetes.io/projected/4b01c123-c126-49d3-a5f7-4f85d7d9466a-kube-api-access-g6db6\") pod \"octavia-operator-controller-manager-69f59f9d8-7qc5n\" (UID: \"4b01c123-c126-49d3-a5f7-4f85d7d9466a\") " pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.369062 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.369413 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-r4sjn" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.375424 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.380156 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.387468 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb8k7\" (UniqueName: \"kubernetes.io/projected/ec5d9f24-e11c-4966-b85d-cfb960db9568-kube-api-access-fb8k7\") pod \"infra-operator-controller-manager-84788b6bc5-ghbgp\" (UID: \"ec5d9f24-e11c-4966-b85d-cfb960db9568\") " pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.389394 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-94thd" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.399532 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqcqd\" (UniqueName: \"kubernetes.io/projected/904d882e-dbf9-4f3c-897c-6cacf5a38057-kube-api-access-gqcqd\") pod \"ironic-operator-controller-manager-5467f8988c-w9lhz\" (UID: \"904d882e-dbf9-4f3c-897c-6cacf5a38057\") " pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.413588 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqd4w\" (UniqueName: \"kubernetes.io/projected/370a8b4d-2c86-4920-91c9-19834ea66e0e-kube-api-access-lqd4w\") pod \"keystone-operator-controller-manager-5b84cc7657-2lh74\" (UID: \"370a8b4d-2c86-4920-91c9-19834ea66e0e\") " pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.426560 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.428156 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.434371 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.473154 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.473447 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.475441 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-bnhbv" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.475752 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5t5m\" (UniqueName: \"kubernetes.io/projected/9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc-kube-api-access-k5t5m\") pod \"mariadb-operator-controller-manager-d6c9dc5bc-tsk8p\" (UID: \"9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc\") " pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.475804 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92bsb\" (UniqueName: \"kubernetes.io/projected/ef26c912-a862-423b-8e2f-2ad84c392e16-kube-api-access-92bsb\") pod \"neutron-operator-controller-manager-69b956fbf6-49zhk\" (UID: \"ef26c912-a862-423b-8e2f-2ad84c392e16\") " pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.475828 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l62fj\" (UniqueName: \"kubernetes.io/projected/52d15369-93d1-4240-9c8a-22fc91e31b2b-kube-api-access-l62fj\") pod \"nova-operator-controller-manager-6c9b57c67-qqbjc\" (UID: \"52d15369-93d1-4240-9c8a-22fc91e31b2b\") " pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.475855 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgk6k\" (UniqueName: \"kubernetes.io/projected/0c353b7c-52c9-490a-ab96-7d47a1dae189-kube-api-access-jgk6k\") pod \"manila-operator-controller-manager-7cb48dbc-k7npw\" (UID: \"0c353b7c-52c9-490a-ab96-7d47a1dae189\") " pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.475905 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6db6\" (UniqueName: \"kubernetes.io/projected/4b01c123-c126-49d3-a5f7-4f85d7d9466a-kube-api-access-g6db6\") pod \"octavia-operator-controller-manager-69f59f9d8-7qc5n\" (UID: \"4b01c123-c126-49d3-a5f7-4f85d7d9466a\") " pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.501159 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-76d5577b-szvb9"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.502208 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.506645 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6db6\" (UniqueName: \"kubernetes.io/projected/4b01c123-c126-49d3-a5f7-4f85d7d9466a-kube-api-access-g6db6\") pod \"octavia-operator-controller-manager-69f59f9d8-7qc5n\" (UID: \"4b01c123-c126-49d3-a5f7-4f85d7d9466a\") " pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.521949 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92bsb\" (UniqueName: \"kubernetes.io/projected/ef26c912-a862-423b-8e2f-2ad84c392e16-kube-api-access-92bsb\") pod \"neutron-operator-controller-manager-69b956fbf6-49zhk\" (UID: \"ef26c912-a862-423b-8e2f-2ad84c392e16\") " pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.523290 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.523710 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-z942l" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.539776 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l62fj\" (UniqueName: \"kubernetes.io/projected/52d15369-93d1-4240-9c8a-22fc91e31b2b-kube-api-access-l62fj\") pod \"nova-operator-controller-manager-6c9b57c67-qqbjc\" (UID: \"52d15369-93d1-4240-9c8a-22fc91e31b2b\") " pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.540250 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5t5m\" (UniqueName: \"kubernetes.io/projected/9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc-kube-api-access-k5t5m\") pod \"mariadb-operator-controller-manager-d6c9dc5bc-tsk8p\" (UID: \"9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc\") " pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.543780 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.546460 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgk6k\" (UniqueName: \"kubernetes.io/projected/0c353b7c-52c9-490a-ab96-7d47a1dae189-kube-api-access-jgk6k\") pod \"manila-operator-controller-manager-7cb48dbc-k7npw\" (UID: \"0c353b7c-52c9-490a-ab96-7d47a1dae189\") " pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.550481 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-76d5577b-szvb9"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.554924 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.566655 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.567983 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.570644 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.571173 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.574481 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-cxk2g" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.577356 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/518b074b-8b71-41d4-9105-b13aa20e3901-cert\") pod \"openstack-baremetal-operator-controller-manager-84784cd75d696n7\" (UID: \"518b074b-8b71-41d4-9105-b13aa20e3901\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.577388 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqtws\" (UniqueName: \"kubernetes.io/projected/b30674df-1f48-4850-ae2c-7c464a84afea-kube-api-access-bqtws\") pod \"ovn-operator-controller-manager-54d485fd9-kqt4h\" (UID: \"b30674df-1f48-4850-ae2c-7c464a84afea\") " pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.577418 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvhf9\" (UniqueName: \"kubernetes.io/projected/8c111ee9-f736-4f86-98b6-590b9adfa58f-kube-api-access-kvhf9\") pod \"placement-operator-controller-manager-66f6d6849b-pwcvf\" (UID: \"8c111ee9-f736-4f86-98b6-590b9adfa58f\") " pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.577484 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns6lm\" (UniqueName: \"kubernetes.io/projected/518b074b-8b71-41d4-9105-b13aa20e3901-kube-api-access-ns6lm\") pod \"openstack-baremetal-operator-controller-manager-84784cd75d696n7\" (UID: \"518b074b-8b71-41d4-9105-b13aa20e3901\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.583888 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.593558 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.601617 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.653852 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-h2v9t" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.667264 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.670371 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.690653 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkhws\" (UniqueName: \"kubernetes.io/projected/045efd67-1f80-49d4-be81-46310d11b717-kube-api-access-bkhws\") pod \"swift-operator-controller-manager-76d5577b-szvb9\" (UID: \"045efd67-1f80-49d4-be81-46310d11b717\") " pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.690784 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/518b074b-8b71-41d4-9105-b13aa20e3901-cert\") pod \"openstack-baremetal-operator-controller-manager-84784cd75d696n7\" (UID: \"518b074b-8b71-41d4-9105-b13aa20e3901\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.690818 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqtws\" (UniqueName: \"kubernetes.io/projected/b30674df-1f48-4850-ae2c-7c464a84afea-kube-api-access-bqtws\") pod \"ovn-operator-controller-manager-54d485fd9-kqt4h\" (UID: \"b30674df-1f48-4850-ae2c-7c464a84afea\") " pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.690884 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvhf9\" (UniqueName: \"kubernetes.io/projected/8c111ee9-f736-4f86-98b6-590b9adfa58f-kube-api-access-kvhf9\") pod \"placement-operator-controller-manager-66f6d6849b-pwcvf\" (UID: \"8c111ee9-f736-4f86-98b6-590b9adfa58f\") " pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.690974 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6qp4\" (UniqueName: \"kubernetes.io/projected/f328cb34-f1aa-49c7-8b79-c465fa4cd522-kube-api-access-f6qp4\") pod \"telemetry-operator-controller-manager-f589c7597-cnfmr\" (UID: \"f328cb34-f1aa-49c7-8b79-c465fa4cd522\") " pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.691254 4757 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ns6lm\" (UniqueName: \"kubernetes.io/projected/518b074b-8b71-41d4-9105-b13aa20e3901-kube-api-access-ns6lm\") pod \"openstack-baremetal-operator-controller-manager-84784cd75d696n7\" (UID: \"518b074b-8b71-41d4-9105-b13aa20e3901\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:30 crc kubenswrapper[4757]: E1006 13:54:30.692548 4757 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 06 13:54:30 crc kubenswrapper[4757]: E1006 13:54:30.692603 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/518b074b-8b71-41d4-9105-b13aa20e3901-cert podName:518b074b-8b71-41d4-9105-b13aa20e3901 nodeName:}" failed. No retries permitted until 2025-10-06 13:54:31.192587505 +0000 UTC m=+959.689906042 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/518b074b-8b71-41d4-9105-b13aa20e3901-cert") pod "openstack-baremetal-operator-controller-manager-84784cd75d696n7" (UID: "518b074b-8b71-41d4-9105-b13aa20e3901") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.703893 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.787598 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.796408 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ec5d9f24-e11c-4966-b85d-cfb960db9568-cert\") pod \"infra-operator-controller-manager-84788b6bc5-ghbgp\" (UID: \"ec5d9f24-e11c-4966-b85d-cfb960db9568\") " pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.796452 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkhws\" (UniqueName: \"kubernetes.io/projected/045efd67-1f80-49d4-be81-46310d11b717-kube-api-access-bkhws\") pod \"swift-operator-controller-manager-76d5577b-szvb9\" (UID: \"045efd67-1f80-49d4-be81-46310d11b717\") " pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.796525 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6qp4\" (UniqueName: \"kubernetes.io/projected/f328cb34-f1aa-49c7-8b79-c465fa4cd522-kube-api-access-f6qp4\") pod \"telemetry-operator-controller-manager-f589c7597-cnfmr\" (UID: \"f328cb34-f1aa-49c7-8b79-c465fa4cd522\") " pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.796581 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhhvb\" (UniqueName: \"kubernetes.io/projected/37d8e975-46d7-46cd-8d20-4229c7ffaada-kube-api-access-nhhvb\") pod \"test-operator-controller-manager-6bb6dcddc-g9hjb\" (UID: \"37d8e975-46d7-46cd-8d20-4229c7ffaada\") " pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" 
Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.799589 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.803303 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.807595 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-64w7b" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.807975 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.830549 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ec5d9f24-e11c-4966-b85d-cfb960db9568-cert\") pod \"infra-operator-controller-manager-84788b6bc5-ghbgp\" (UID: \"ec5d9f24-e11c-4966-b85d-cfb960db9568\") " pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.832841 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkhws\" (UniqueName: \"kubernetes.io/projected/045efd67-1f80-49d4-be81-46310d11b717-kube-api-access-bkhws\") pod \"swift-operator-controller-manager-76d5577b-szvb9\" (UID: \"045efd67-1f80-49d4-be81-46310d11b717\") " pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.842982 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqtws\" (UniqueName: \"kubernetes.io/projected/b30674df-1f48-4850-ae2c-7c464a84afea-kube-api-access-bqtws\") pod \"ovn-operator-controller-manager-54d485fd9-kqt4h\" (UID: \"b30674df-1f48-4850-ae2c-7c464a84afea\") " pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.847574 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns6lm\" (UniqueName: \"kubernetes.io/projected/518b074b-8b71-41d4-9105-b13aa20e3901-kube-api-access-ns6lm\") pod \"openstack-baremetal-operator-controller-manager-84784cd75d696n7\" (UID: \"518b074b-8b71-41d4-9105-b13aa20e3901\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.847650 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6qp4\" (UniqueName: \"kubernetes.io/projected/f328cb34-f1aa-49c7-8b79-c465fa4cd522-kube-api-access-f6qp4\") pod \"telemetry-operator-controller-manager-f589c7597-cnfmr\" (UID: \"f328cb34-f1aa-49c7-8b79-c465fa4cd522\") " pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.851057 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-867844c698-w92jl"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.851320 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvhf9\" (UniqueName: \"kubernetes.io/projected/8c111ee9-f736-4f86-98b6-590b9adfa58f-kube-api-access-kvhf9\") pod 
\"placement-operator-controller-manager-66f6d6849b-pwcvf\" (UID: \"8c111ee9-f736-4f86-98b6-590b9adfa58f\") " pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.852256 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.855044 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.856446 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-j8p5v" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.864006 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-867844c698-w92jl"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.872155 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.872948 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.874648 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-w9vsg" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.876888 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67"] Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.893994 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.897369 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.898570 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhhvb\" (UniqueName: \"kubernetes.io/projected/37d8e975-46d7-46cd-8d20-4229c7ffaada-kube-api-access-nhhvb\") pod \"test-operator-controller-manager-6bb6dcddc-g9hjb\" (UID: \"37d8e975-46d7-46cd-8d20-4229c7ffaada\") " pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.898718 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/04761044-c4c0-4fcf-9229-4050ef0e64da-cert\") pod \"openstack-operator-controller-manager-867844c698-w92jl\" (UID: \"04761044-c4c0-4fcf-9229-4050ef0e64da\") " pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.898771 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r9zv\" (UniqueName: \"kubernetes.io/projected/04761044-c4c0-4fcf-9229-4050ef0e64da-kube-api-access-7r9zv\") pod \"openstack-operator-controller-manager-867844c698-w92jl\" (UID: \"04761044-c4c0-4fcf-9229-4050ef0e64da\") " pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.898797 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb8sl\" (UniqueName: \"kubernetes.io/projected/fa3aa8e8-3594-4e41-afa7-d59a965cde23-kube-api-access-rb8sl\") pod \"watcher-operator-controller-manager-5d98cc5575-shmfd\" (UID: \"fa3aa8e8-3594-4e41-afa7-d59a965cde23\") " pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.898889 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjtnc\" (UniqueName: \"kubernetes.io/projected/a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c-kube-api-access-vjtnc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-c7h67\" (UID: \"a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.919132 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhhvb\" (UniqueName: \"kubernetes.io/projected/37d8e975-46d7-46cd-8d20-4229c7ffaada-kube-api-access-nhhvb\") pod \"test-operator-controller-manager-6bb6dcddc-g9hjb\" (UID: \"37d8e975-46d7-46cd-8d20-4229c7ffaada\") " pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.943420 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.950469 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" Oct 06 13:54:30 crc kubenswrapper[4757]: I1006 13:54:30.997165 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.000287 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/04761044-c4c0-4fcf-9229-4050ef0e64da-cert\") pod \"openstack-operator-controller-manager-867844c698-w92jl\" (UID: \"04761044-c4c0-4fcf-9229-4050ef0e64da\") " pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.000326 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r9zv\" (UniqueName: \"kubernetes.io/projected/04761044-c4c0-4fcf-9229-4050ef0e64da-kube-api-access-7r9zv\") pod \"openstack-operator-controller-manager-867844c698-w92jl\" (UID: \"04761044-c4c0-4fcf-9229-4050ef0e64da\") " pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.000368 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb8sl\" (UniqueName: \"kubernetes.io/projected/fa3aa8e8-3594-4e41-afa7-d59a965cde23-kube-api-access-rb8sl\") pod \"watcher-operator-controller-manager-5d98cc5575-shmfd\" (UID: \"fa3aa8e8-3594-4e41-afa7-d59a965cde23\") " pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.000446 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjtnc\" (UniqueName: \"kubernetes.io/projected/a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c-kube-api-access-vjtnc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-c7h67\" (UID: \"a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" Oct 06 13:54:31 crc kubenswrapper[4757]: E1006 13:54:31.001638 4757 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 06 13:54:31 crc kubenswrapper[4757]: E1006 13:54:31.001739 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/04761044-c4c0-4fcf-9229-4050ef0e64da-cert podName:04761044-c4c0-4fcf-9229-4050ef0e64da nodeName:}" failed. No retries permitted until 2025-10-06 13:54:31.501717424 +0000 UTC m=+959.999035961 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/04761044-c4c0-4fcf-9229-4050ef0e64da-cert") pod "openstack-operator-controller-manager-867844c698-w92jl" (UID: "04761044-c4c0-4fcf-9229-4050ef0e64da") : secret "webhook-server-cert" not found Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.026639 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r9zv\" (UniqueName: \"kubernetes.io/projected/04761044-c4c0-4fcf-9229-4050ef0e64da-kube-api-access-7r9zv\") pod \"openstack-operator-controller-manager-867844c698-w92jl\" (UID: \"04761044-c4c0-4fcf-9229-4050ef0e64da\") " pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.031870 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjtnc\" (UniqueName: \"kubernetes.io/projected/a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c-kube-api-access-vjtnc\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-c7h67\" (UID: \"a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.031982 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb8sl\" (UniqueName: \"kubernetes.io/projected/fa3aa8e8-3594-4e41-afa7-d59a965cde23-kube-api-access-rb8sl\") pod \"watcher-operator-controller-manager-5d98cc5575-shmfd\" (UID: \"fa3aa8e8-3594-4e41-afa7-d59a965cde23\") " pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.104601 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.205297 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/518b074b-8b71-41d4-9105-b13aa20e3901-cert\") pod \"openstack-baremetal-operator-controller-manager-84784cd75d696n7\" (UID: \"518b074b-8b71-41d4-9105-b13aa20e3901\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:31 crc kubenswrapper[4757]: E1006 13:54:31.205481 4757 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 06 13:54:31 crc kubenswrapper[4757]: E1006 13:54:31.205536 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/518b074b-8b71-41d4-9105-b13aa20e3901-cert podName:518b074b-8b71-41d4-9105-b13aa20e3901 nodeName:}" failed. No retries permitted until 2025-10-06 13:54:32.205518047 +0000 UTC m=+960.702836594 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/518b074b-8b71-41d4-9105-b13aa20e3901-cert") pod "openstack-baremetal-operator-controller-manager-84784cd75d696n7" (UID: "518b074b-8b71-41d4-9105-b13aa20e3901") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.275575 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.310371 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.515671 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/04761044-c4c0-4fcf-9229-4050ef0e64da-cert\") pod \"openstack-operator-controller-manager-867844c698-w92jl\" (UID: \"04761044-c4c0-4fcf-9229-4050ef0e64da\") " pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:31 crc kubenswrapper[4757]: E1006 13:54:31.516472 4757 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 06 13:54:31 crc kubenswrapper[4757]: E1006 13:54:31.516546 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/04761044-c4c0-4fcf-9229-4050ef0e64da-cert podName:04761044-c4c0-4fcf-9229-4050ef0e64da nodeName:}" failed. No retries permitted until 2025-10-06 13:54:32.516529196 +0000 UTC m=+961.013847733 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/04761044-c4c0-4fcf-9229-4050ef0e64da-cert") pod "openstack-operator-controller-manager-867844c698-w92jl" (UID: "04761044-c4c0-4fcf-9229-4050ef0e64da") : secret "webhook-server-cert" not found Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.518911 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg"] Oct 06 13:54:31 crc kubenswrapper[4757]: W1006 13:54:31.521878 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f9c96a5_1d49_48a6_89d4_56fdea632598.slice/crio-c84dca07adc486a123a7bd6014a6342af505ba78f69c9176c1446443e0db03ae WatchSource:0}: Error finding container c84dca07adc486a123a7bd6014a6342af505ba78f69c9176c1446443e0db03ae: Status 404 returned error can't find the container with id c84dca07adc486a123a7bd6014a6342af505ba78f69c9176c1446443e0db03ae Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.559973 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj"] Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.577551 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5"] Oct 06 13:54:31 crc kubenswrapper[4757]: W1006 13:54:31.578224 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb539d43b_476d_46dc_944b_47ba64f84566.slice/crio-57b30ec27bc1de561cebab2829c766a3bfe27b6cab37ba89266b8a7b94bb0f56 WatchSource:0}: Error finding container 57b30ec27bc1de561cebab2829c766a3bfe27b6cab37ba89266b8a7b94bb0f56: Status 404 returned error can't find the container with id 57b30ec27bc1de561cebab2829c766a3bfe27b6cab37ba89266b8a7b94bb0f56 Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.584600 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz"] Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.666970 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc"] Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.866489 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" event={"ID":"28e3257e-3a28-4a92-8658-936b70dd1b79","Type":"ContainerStarted","Data":"7fc2bf6eebc136d4c5b2eb079ebd620749e86a567fede93dec2c6b6e2b830cd5"} Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.867831 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" event={"ID":"5f9c96a5-1d49-48a6-89d4-56fdea632598","Type":"ContainerStarted","Data":"c84dca07adc486a123a7bd6014a6342af505ba78f69c9176c1446443e0db03ae"} Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.868853 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" event={"ID":"44a67ef5-395a-4ba1-8572-847e4bc6e4a1","Type":"ContainerStarted","Data":"3ed02ca418152922c10426c9f8ca03e0c51cacab7905543cb6a89fcaba629e8b"} Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.869950 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" event={"ID":"b539d43b-476d-46dc-944b-47ba64f84566","Type":"ContainerStarted","Data":"57b30ec27bc1de561cebab2829c766a3bfe27b6cab37ba89266b8a7b94bb0f56"} Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.870982 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" event={"ID":"deeb9237-8930-41f6-94e5-bd012cca4f95","Type":"ContainerStarted","Data":"ef72c1645ea8de0cf4ac2a4c2051ed89819f711bd42751118b28b503d3dc9d23"} Oct 06 13:54:31 crc kubenswrapper[4757]: I1006 13:54:31.969542 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74"] Oct 06 13:54:31 crc kubenswrapper[4757]: W1006 13:54:31.979859 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod370a8b4d_2c86_4920_91c9_19834ea66e0e.slice/crio-14d5226c0291b2cbfeb228ded1ff905c8ef973bb054476dd80f3517284c90d27 WatchSource:0}: Error finding container 14d5226c0291b2cbfeb228ded1ff905c8ef973bb054476dd80f3517284c90d27: Status 404 returned error can't find the container with id 14d5226c0291b2cbfeb228ded1ff905c8ef973bb054476dd80f3517284c90d27 Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.012587 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw"] Oct 06 13:54:32 crc kubenswrapper[4757]: W1006 13:54:32.019553 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c353b7c_52c9_490a_ab96_7d47a1dae189.slice/crio-119baf4acc4385a43f6b4b9b5bb1a92f6050cc4a7ff7a6cac2f9504c724bfde7 WatchSource:0}: Error finding container 119baf4acc4385a43f6b4b9b5bb1a92f6050cc4a7ff7a6cac2f9504c724bfde7: Status 404 returned error can't find the container with id 119baf4acc4385a43f6b4b9b5bb1a92f6050cc4a7ff7a6cac2f9504c724bfde7 Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.037304 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.048510 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.056832 
4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.062365 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.084699 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p"] Oct 06 13:54:32 crc kubenswrapper[4757]: W1006 13:54:32.087992 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf328cb34_f1aa_49c7_8b79_c465fa4cd522.slice/crio-32fc1f3eb4e85898b086ae00dc3c2881fb04d69906d07eb771077dce6fc55fa0 WatchSource:0}: Error finding container 32fc1f3eb4e85898b086ae00dc3c2881fb04d69906d07eb771077dce6fc55fa0: Status 404 returned error can't find the container with id 32fc1f3eb4e85898b086ae00dc3c2881fb04d69906d07eb771077dce6fc55fa0 Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.107933 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f6qp4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-f589c7597-cnfmr_openstack-operators(f328cb34-f1aa-49c7-8b79-c465fa4cd522): ErrImagePull: pull QPS exceeded" 
logger="UnhandledError" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.107938 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nhhvb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-6bb6dcddc-g9hjb_openstack-operators(37d8e975-46d7-46cd-8d20-4229c7ffaada): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.114376 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr"] Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.118674 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: 
{{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bkhws,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-76d5577b-szvb9_openstack-operators(045efd67-1f80-49d4-be81-46310d11b717): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.138148 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.148132 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.161720 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.161760 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.176109 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp"] Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.177011 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: 
{{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vjtnc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-c7h67_openstack-operators(a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.177307 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:e4c4ff39c54c0af231fb781759ab50ed86285c74d38bdea43fa75646b762d842,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l62fj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
nova-operator-controller-manager-6c9b57c67-qqbjc_openstack-operators(52d15369-93d1-4240-9c8a-22fc91e31b2b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.177515 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fb8k7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-84788b6bc5-ghbgp_openstack-operators(ec5d9f24-e11c-4966-b85d-cfb960db9568): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.177578 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:adc23c5fd1aece2b16dc8e22ceed628f9a719455e39d3f98c77544665c6749e1,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m 
DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kvhf9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-66f6d6849b-pwcvf_openstack-operators(8c111ee9-f736-4f86-98b6-590b9adfa58f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.177606 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rb8sl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-5d98cc5575-shmfd_openstack-operators(fa3aa8e8-3594-4e41-afa7-d59a965cde23): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.178492 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" podUID="a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.226480 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-76d5577b-szvb9"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.226802 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.226817 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67"] Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.230369 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/518b074b-8b71-41d4-9105-b13aa20e3901-cert\") pod \"openstack-baremetal-operator-controller-manager-84784cd75d696n7\" (UID: \"518b074b-8b71-41d4-9105-b13aa20e3901\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.252189 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/518b074b-8b71-41d4-9105-b13aa20e3901-cert\") pod \"openstack-baremetal-operator-controller-manager-84784cd75d696n7\" (UID: \"518b074b-8b71-41d4-9105-b13aa20e3901\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.400467 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-94thd" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.409379 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.539816 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/04761044-c4c0-4fcf-9229-4050ef0e64da-cert\") pod \"openstack-operator-controller-manager-867844c698-w92jl\" (UID: \"04761044-c4c0-4fcf-9229-4050ef0e64da\") " pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.546779 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" podUID="8c111ee9-f736-4f86-98b6-590b9adfa58f" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.547041 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" podUID="37d8e975-46d7-46cd-8d20-4229c7ffaada" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.547282 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/04761044-c4c0-4fcf-9229-4050ef0e64da-cert\") pod \"openstack-operator-controller-manager-867844c698-w92jl\" (UID: \"04761044-c4c0-4fcf-9229-4050ef0e64da\") " pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.548232 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" podUID="f328cb34-f1aa-49c7-8b79-c465fa4cd522" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.552415 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" podUID="045efd67-1f80-49d4-be81-46310d11b717" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.569718 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" podUID="fa3aa8e8-3594-4e41-afa7-d59a965cde23" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.572391 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" podUID="ec5d9f24-e11c-4966-b85d-cfb960db9568" Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.600020 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" podUID="52d15369-93d1-4240-9c8a-22fc91e31b2b" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.796408 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-j8p5v" Oct 06 13:54:32 crc kubenswrapper[4757]: 
I1006 13:54:32.803368 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.912248 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" event={"ID":"0c353b7c-52c9-490a-ab96-7d47a1dae189","Type":"ContainerStarted","Data":"119baf4acc4385a43f6b4b9b5bb1a92f6050cc4a7ff7a6cac2f9504c724bfde7"} Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.919270 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" event={"ID":"8c111ee9-f736-4f86-98b6-590b9adfa58f","Type":"ContainerStarted","Data":"82d52dee3b7813f23cad6ce662c30197d17f289adc73b9e6fdcf969152245cbc"} Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.919310 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" event={"ID":"8c111ee9-f736-4f86-98b6-590b9adfa58f","Type":"ContainerStarted","Data":"7a32fccacd590bfa699272d83dc8a576d97703de46427ea89bb8f3eec8d9e874"} Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.921178 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" event={"ID":"370a8b4d-2c86-4920-91c9-19834ea66e0e","Type":"ContainerStarted","Data":"14d5226c0291b2cbfeb228ded1ff905c8ef973bb054476dd80f3517284c90d27"} Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.921568 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:adc23c5fd1aece2b16dc8e22ceed628f9a719455e39d3f98c77544665c6749e1\\\"\"" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" podUID="8c111ee9-f736-4f86-98b6-590b9adfa58f" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.929794 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" event={"ID":"e6876774-af7e-4e5e-bed0-db5f2ac668b9","Type":"ContainerStarted","Data":"4479c9e892651b091e07069c22669fbfcaba1c5486c085481700647ddc1249d0"} Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.938662 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" event={"ID":"a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c","Type":"ContainerStarted","Data":"ee14650689279784d20728228554d28f72e63cd39e2df965c1716d1059af009c"} Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.940665 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" podUID="a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.952779 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" 
event={"ID":"52d15369-93d1-4240-9c8a-22fc91e31b2b","Type":"ContainerStarted","Data":"d32f270531106cdb41895c17dafc764984e26086ec016a6afbc8622c02b4161a"} Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.952818 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" event={"ID":"52d15369-93d1-4240-9c8a-22fc91e31b2b","Type":"ContainerStarted","Data":"6b2f16013b20918d6fb3db2d07f2911d13840f1cdf3bb9d15c6f3daf0ec564c3"} Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.960139 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:e4c4ff39c54c0af231fb781759ab50ed86285c74d38bdea43fa75646b762d842\\\"\"" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" podUID="52d15369-93d1-4240-9c8a-22fc91e31b2b" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.964327 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" event={"ID":"9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc","Type":"ContainerStarted","Data":"355b85ab451a53810cf32187219f77ac7ae60275ca02748596bffb8c4b698f0c"} Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.973568 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" event={"ID":"37d8e975-46d7-46cd-8d20-4229c7ffaada","Type":"ContainerStarted","Data":"229a427c47351c9b8a8c63076683a95da37d66deb660a2ee9064e061265636a0"} Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.973618 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" event={"ID":"37d8e975-46d7-46cd-8d20-4229c7ffaada","Type":"ContainerStarted","Data":"00915aa4eb485bf18ad5dd478b7541e50dfe2376728d7c31328e798311d66e5e"} Oct 06 13:54:32 crc kubenswrapper[4757]: E1006 13:54:32.985142 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" podUID="37d8e975-46d7-46cd-8d20-4229c7ffaada" Oct 06 13:54:32 crc kubenswrapper[4757]: I1006 13:54:32.989001 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" event={"ID":"4b01c123-c126-49d3-a5f7-4f85d7d9466a","Type":"ContainerStarted","Data":"06eccf773d1433d83cab1f052f963e129e0626b3153b8f7778bdb9629a8beb77"} Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.013411 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" event={"ID":"f328cb34-f1aa-49c7-8b79-c465fa4cd522","Type":"ContainerStarted","Data":"27f058272a2060db101e39201bcec2339891d907304839860595859280650021"} Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.013467 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" event={"ID":"f328cb34-f1aa-49c7-8b79-c465fa4cd522","Type":"ContainerStarted","Data":"32fc1f3eb4e85898b086ae00dc3c2881fb04d69906d07eb771077dce6fc55fa0"} Oct 06 13:54:33 crc 
kubenswrapper[4757]: E1006 13:54:33.015518 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" podUID="f328cb34-f1aa-49c7-8b79-c465fa4cd522" Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.037730 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" event={"ID":"fa3aa8e8-3594-4e41-afa7-d59a965cde23","Type":"ContainerStarted","Data":"d0fb6d5c633bce0a29de677e313aa500da7042caf29d9212fd1ed8297f4465b8"} Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.037783 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" event={"ID":"fa3aa8e8-3594-4e41-afa7-d59a965cde23","Type":"ContainerStarted","Data":"7038bf9a3b033249373c64756316baa226c784e4fe8c5ac72b1f1822ca9e0d41"} Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.059272 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" event={"ID":"ec5d9f24-e11c-4966-b85d-cfb960db9568","Type":"ContainerStarted","Data":"fd8326854f0451d30b260dce9275fed97a27c5db78a148ebd96c75e46bb3a2d4"} Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.059327 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" event={"ID":"ec5d9f24-e11c-4966-b85d-cfb960db9568","Type":"ContainerStarted","Data":"83e6a2a8ac888e2adbce1a6d1afdf949801a4a40bf326993ef940f30b9a6e2c7"} Oct 06 13:54:33 crc kubenswrapper[4757]: E1006 13:54:33.061529 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" podUID="fa3aa8e8-3594-4e41-afa7-d59a965cde23" Oct 06 13:54:33 crc kubenswrapper[4757]: E1006 13:54:33.061970 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" podUID="ec5d9f24-e11c-4966-b85d-cfb960db9568" Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.067770 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" event={"ID":"045efd67-1f80-49d4-be81-46310d11b717","Type":"ContainerStarted","Data":"17466a5a8d3a55e31778f698a2b34addf4e12eaae67bdee412d5975be2f8bfab"} Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.067801 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" event={"ID":"045efd67-1f80-49d4-be81-46310d11b717","Type":"ContainerStarted","Data":"74518492377dc5328696853612242d4b4339d10a1ee17f710b5ef65aec9b3a2a"} Oct 06 13:54:33 crc kubenswrapper[4757]: E1006 13:54:33.085763 4757 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" podUID="045efd67-1f80-49d4-be81-46310d11b717" Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.094737 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" event={"ID":"904d882e-dbf9-4f3c-897c-6cacf5a38057","Type":"ContainerStarted","Data":"906544bb54b5e20eb628cdb7bc3a3f3983ed87eeaf6c8900ed3cc88e0997d468"} Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.102642 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" event={"ID":"b30674df-1f48-4850-ae2c-7c464a84afea","Type":"ContainerStarted","Data":"1dc404ae69e0875e5dc16550ae2ea35ef2135882a165174d24d94a6dbdd47659"} Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.103401 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" event={"ID":"ef26c912-a862-423b-8e2f-2ad84c392e16","Type":"ContainerStarted","Data":"06794dc936ab64120c9340cbd1e05c4a752d25ef5b71db660a558678ede012c0"} Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.123942 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7"] Oct 06 13:54:33 crc kubenswrapper[4757]: I1006 13:54:33.182425 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-867844c698-w92jl"] Oct 06 13:54:34 crc kubenswrapper[4757]: I1006 13:54:34.142298 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" event={"ID":"04761044-c4c0-4fcf-9229-4050ef0e64da","Type":"ContainerStarted","Data":"d936fd006142d90a891a44c808dc399e93fc58be1c4ffe202af9df3a04571b53"} Oct 06 13:54:34 crc kubenswrapper[4757]: I1006 13:54:34.142713 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" event={"ID":"04761044-c4c0-4fcf-9229-4050ef0e64da","Type":"ContainerStarted","Data":"39359ca154a276ce7d705ce09a38906e9f7b39c7e8eec8602d80f11cf166a63c"} Oct 06 13:54:34 crc kubenswrapper[4757]: I1006 13:54:34.142728 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" event={"ID":"04761044-c4c0-4fcf-9229-4050ef0e64da","Type":"ContainerStarted","Data":"c81897c70c0454ee311a0bb9e4c4f9e6c61b5c81170ac1e3d787c1f4902b358a"} Oct 06 13:54:34 crc kubenswrapper[4757]: I1006 13:54:34.142765 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:34 crc kubenswrapper[4757]: I1006 13:54:34.177977 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" event={"ID":"518b074b-8b71-41d4-9105-b13aa20e3901","Type":"ContainerStarted","Data":"3999d7c5b1afc76d0eeff9c912c657d35330485cc58389ebc2697f80b3de08c1"} Oct 06 13:54:34 crc kubenswrapper[4757]: E1006 13:54:34.221006 4757 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:bf55026ba10b80e1e24733078bd204cef8766d21a305fd000707a1e3b30ff52e\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" podUID="f328cb34-f1aa-49c7-8b79-c465fa4cd522" Oct 06 13:54:34 crc kubenswrapper[4757]: E1006 13:54:34.221266 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:0daf76cc40ab619ae266b11defcc1b65beb22d859369e7b1b04de9169089a4cb\\\"\"" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" podUID="37d8e975-46d7-46cd-8d20-4229c7ffaada" Oct 06 13:54:34 crc kubenswrapper[4757]: E1006 13:54:34.221332 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:e4c4ff39c54c0af231fb781759ab50ed86285c74d38bdea43fa75646b762d842\\\"\"" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" podUID="52d15369-93d1-4240-9c8a-22fc91e31b2b" Oct 06 13:54:34 crc kubenswrapper[4757]: E1006 13:54:34.221367 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:64f57b2b59dea2bd9fae91490c5bec2687131884a049e6579819d9f951b877c6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" podUID="fa3aa8e8-3594-4e41-afa7-d59a965cde23" Oct 06 13:54:34 crc kubenswrapper[4757]: E1006 13:54:34.221406 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:adc23c5fd1aece2b16dc8e22ceed628f9a719455e39d3f98c77544665c6749e1\\\"\"" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" podUID="8c111ee9-f736-4f86-98b6-590b9adfa58f" Oct 06 13:54:34 crc kubenswrapper[4757]: E1006 13:54:34.221450 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" podUID="045efd67-1f80-49d4-be81-46310d11b717" Oct 06 13:54:34 crc kubenswrapper[4757]: E1006 13:54:34.221491 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:b6cef68bfaacdf992a9fa1a6b03a848a48c18cbb6ed12d95561b4b37d858b99f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" podUID="ec5d9f24-e11c-4966-b85d-cfb960db9568" Oct 06 13:54:34 crc kubenswrapper[4757]: E1006 13:54:34.221492 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" podUID="a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c" Oct 06 13:54:34 crc kubenswrapper[4757]: I1006 13:54:34.228059 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" podStartSLOduration=4.228035813 podStartE2EDuration="4.228035813s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:54:34.190976795 +0000 UTC m=+962.688295332" watchObservedRunningTime="2025-10-06 13:54:34.228035813 +0000 UTC m=+962.725354350" Oct 06 13:54:34 crc kubenswrapper[4757]: I1006 13:54:34.363827 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:54:34 crc kubenswrapper[4757]: I1006 13:54:34.363898 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:54:34 crc kubenswrapper[4757]: I1006 13:54:34.363933 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:54:35 crc kubenswrapper[4757]: I1006 13:54:35.188392 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c10bf450268206bb6caa070d8d9e8b690b70b76277c3af98f50337e231aead63"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 13:54:35 crc kubenswrapper[4757]: I1006 13:54:35.188738 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://c10bf450268206bb6caa070d8d9e8b690b70b76277c3af98f50337e231aead63" gracePeriod=600 Oct 06 13:54:36 crc kubenswrapper[4757]: I1006 13:54:36.197449 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="c10bf450268206bb6caa070d8d9e8b690b70b76277c3af98f50337e231aead63" exitCode=0 Oct 06 13:54:36 crc kubenswrapper[4757]: I1006 13:54:36.197536 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"c10bf450268206bb6caa070d8d9e8b690b70b76277c3af98f50337e231aead63"} Oct 06 13:54:36 crc kubenswrapper[4757]: I1006 13:54:36.197955 4757 scope.go:117] "RemoveContainer" containerID="6542a2ffbfd366a895d5ce83b4ab3a725c3018b8a608b5d8f6bff2292762cd35" Oct 06 13:54:42 crc kubenswrapper[4757]: I1006 13:54:42.814350 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack-operators/openstack-operator-controller-manager-867844c698-w92jl" Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.346799 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" event={"ID":"b539d43b-476d-46dc-944b-47ba64f84566","Type":"ContainerStarted","Data":"622a0b1cb3c1c8806fc5e8adfe083a71dd594717be355c5f8f06cb0dc90f9d23"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.368914 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" event={"ID":"518b074b-8b71-41d4-9105-b13aa20e3901","Type":"ContainerStarted","Data":"2ff39e68a03649f33b85323c425f0decfc86134cf057ade6c1fb2a75421e2707"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.395020 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" event={"ID":"e6876774-af7e-4e5e-bed0-db5f2ac668b9","Type":"ContainerStarted","Data":"6069b127d7cf62e4a78ae2d58123581d14378fb26c2fce2ddcc5eb3298826847"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.413924 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" event={"ID":"deeb9237-8930-41f6-94e5-bd012cca4f95","Type":"ContainerStarted","Data":"9a778d4a59ae5e2003a568b2076519aa50337cf2d271fb12cd3361f30d222b1b"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.425863 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" event={"ID":"9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc","Type":"ContainerStarted","Data":"63f91cc3efde6c5176db67faec5de3d0099ce90715e1a654307711c919cb4226"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.471930 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"8ba4bb2fc370115674e0f99022b0181292af98f8a9ad6252b38df72c7f3b30ad"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.489665 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" event={"ID":"b30674df-1f48-4850-ae2c-7c464a84afea","Type":"ContainerStarted","Data":"d2c71a2f60d29cbb5f32cc1a09ea8900b31f99578a01c7948f6b2faa4f92ce19"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.489710 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" event={"ID":"b30674df-1f48-4850-ae2c-7c464a84afea","Type":"ContainerStarted","Data":"5ce38abdee851b23e5461d5f1a7c3bac385e32b0a0ca0746c0c953a4cb7c69f7"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.490305 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.506242 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" event={"ID":"5f9c96a5-1d49-48a6-89d4-56fdea632598","Type":"ContainerStarted","Data":"8e9b2ad03fbe5ab78ab02a59027a550580615d75c56f658a7293e4574d17621c"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.511354 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" event={"ID":"ef26c912-a862-423b-8e2f-2ad84c392e16","Type":"ContainerStarted","Data":"0ffc22019ac5701faaa34ee0e7652ce8e80aa85cf35fd81d85e21adced19e7b0"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.524448 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" event={"ID":"4b01c123-c126-49d3-a5f7-4f85d7d9466a","Type":"ContainerStarted","Data":"a3a0a5c8dddc50aeb2e3a33e06c49dac5087f459db37bce2911115510477922f"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.589577 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" event={"ID":"904d882e-dbf9-4f3c-897c-6cacf5a38057","Type":"ContainerStarted","Data":"27b1549bc17a73c8a1b32a2c65807aa43af961dd986d7957562364d5d96fef17"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.632563 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" event={"ID":"44a67ef5-395a-4ba1-8572-847e4bc6e4a1","Type":"ContainerStarted","Data":"3329676d509c9cdd83027e2a253d2834d649bc5637251092288c13a115ad232a"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.639123 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" event={"ID":"0c353b7c-52c9-490a-ab96-7d47a1dae189","Type":"ContainerStarted","Data":"040d4e2bd4d64fc80b826501c7b3f702f0590cfdc8b05cb4c39976f7585fb2ff"} Oct 06 13:54:44 crc kubenswrapper[4757]: I1006 13:54:44.669235 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" event={"ID":"28e3257e-3a28-4a92-8658-936b70dd1b79","Type":"ContainerStarted","Data":"1278ea2b35fe47d10c88fcf0dd190b497ff9bf84c304c5cb474e81ff60577006"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.677478 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" event={"ID":"5f9c96a5-1d49-48a6-89d4-56fdea632598","Type":"ContainerStarted","Data":"6a78e944f3ae1db9020008724b0489f7cbae4f1b2e328bd1374f97e831a1ab1a"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.678083 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.679447 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" event={"ID":"ef26c912-a862-423b-8e2f-2ad84c392e16","Type":"ContainerStarted","Data":"b17f0ce4b4883645aaafaf693296adf977e1a24ef5bb7048f6037f65fa017e75"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.679609 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.681948 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" event={"ID":"b539d43b-476d-46dc-944b-47ba64f84566","Type":"ContainerStarted","Data":"1763f95b8a78761b39dec395e63ec3f4663ff0e7bedf56eadc41038dad9307d5"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.682085 4757 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.683756 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" event={"ID":"518b074b-8b71-41d4-9105-b13aa20e3901","Type":"ContainerStarted","Data":"eb761047f8afb7fa85951c23c7791fc06d0e3844cdf2a5046608efbfa18b8c40"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.683868 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.685830 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" event={"ID":"e6876774-af7e-4e5e-bed0-db5f2ac668b9","Type":"ContainerStarted","Data":"ec1a3626473c7c2c0a15c8b4a928c50e98ef74b9136aa8cf8d2b99eb54662b57"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.685951 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.687508 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" event={"ID":"28e3257e-3a28-4a92-8658-936b70dd1b79","Type":"ContainerStarted","Data":"711ba9c6aa2839dc5de84d45c9046a495654ffdc71eb17736c7da30b1ea1c54b"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.687633 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.688919 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" event={"ID":"9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc","Type":"ContainerStarted","Data":"8cb4be9933d4f09f108eec56c204e48c2d1f268d45a921780c003d9c8c5d4f41"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.689026 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.690548 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" event={"ID":"904d882e-dbf9-4f3c-897c-6cacf5a38057","Type":"ContainerStarted","Data":"789be96fd6400a537898298b1ab76e6f32b8a2b453a59c1b6a83d60899f8ca01"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.690664 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.691940 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" event={"ID":"44a67ef5-395a-4ba1-8572-847e4bc6e4a1","Type":"ContainerStarted","Data":"7fc7e37afb7f5154e8c27dee3d7d0842862ad20b1a712983977a672f4a681e0f"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.692036 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.693592 
4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" event={"ID":"0c353b7c-52c9-490a-ab96-7d47a1dae189","Type":"ContainerStarted","Data":"2f1d6fa7d686195c9b645d4322f9df1fe09aa686540153d2cbfcf1fe2f388e8a"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.693662 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.695116 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" event={"ID":"4b01c123-c126-49d3-a5f7-4f85d7d9466a","Type":"ContainerStarted","Data":"0170adea468684c5877f74f794366594a0de7634c2d1c942acfcba7da0df3d3e"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.695233 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.696989 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" event={"ID":"370a8b4d-2c86-4920-91c9-19834ea66e0e","Type":"ContainerStarted","Data":"be17ca9ead5935ef820fd4e40877259ec4b964e0f8e30a236e41871d693332e3"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.697012 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" event={"ID":"370a8b4d-2c86-4920-91c9-19834ea66e0e","Type":"ContainerStarted","Data":"508e85f8d97e9fba7060dccdf96b86e3618af8a9af7b012d0bf3f7e20504c3f5"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.697116 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.698622 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" event={"ID":"deeb9237-8930-41f6-94e5-bd012cca4f95","Type":"ContainerStarted","Data":"7f5df38434989ecba9716201b424413d59ee4417a92525e17e6530766c641873"} Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.698889 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.701577 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" podStartSLOduration=3.896830771 podStartE2EDuration="15.701556559s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:31.525717888 +0000 UTC m=+960.023036425" lastFinishedPulling="2025-10-06 13:54:43.330443676 +0000 UTC m=+971.827762213" observedRunningTime="2025-10-06 13:54:45.691537515 +0000 UTC m=+974.188856052" watchObservedRunningTime="2025-10-06 13:54:45.701556559 +0000 UTC m=+974.198875096" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.701847 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" podStartSLOduration=4.488988638 podStartE2EDuration="15.701842587s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.044027721 
+0000 UTC m=+960.541346258" lastFinishedPulling="2025-10-06 13:54:43.25688168 +0000 UTC m=+971.754200207" observedRunningTime="2025-10-06 13:54:44.544555512 +0000 UTC m=+973.041874049" watchObservedRunningTime="2025-10-06 13:54:45.701842587 +0000 UTC m=+974.199161114" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.726303 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" podStartSLOduration=4.527198951 podStartE2EDuration="15.726278952s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.088174383 +0000 UTC m=+960.585492920" lastFinishedPulling="2025-10-06 13:54:43.287254384 +0000 UTC m=+971.784572921" observedRunningTime="2025-10-06 13:54:45.722025858 +0000 UTC m=+974.219344465" watchObservedRunningTime="2025-10-06 13:54:45.726278952 +0000 UTC m=+974.223597499" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.755170 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" podStartSLOduration=5.6369767060000004 podStartE2EDuration="15.755152634s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:33.169033205 +0000 UTC m=+961.666351742" lastFinishedPulling="2025-10-06 13:54:43.287209133 +0000 UTC m=+971.784527670" observedRunningTime="2025-10-06 13:54:45.752392807 +0000 UTC m=+974.249711374" watchObservedRunningTime="2025-10-06 13:54:45.755152634 +0000 UTC m=+974.252471171" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.779626 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" podStartSLOduration=5.575671384 podStartE2EDuration="16.779578078s" podCreationTimestamp="2025-10-06 13:54:29 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.07736561 +0000 UTC m=+960.574684147" lastFinishedPulling="2025-10-06 13:54:43.281272304 +0000 UTC m=+971.778590841" observedRunningTime="2025-10-06 13:54:45.775598123 +0000 UTC m=+974.272916660" watchObservedRunningTime="2025-10-06 13:54:45.779578078 +0000 UTC m=+974.276896615" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.802123 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" podStartSLOduration=5.050835312 podStartE2EDuration="16.802081622s" podCreationTimestamp="2025-10-06 13:54:29 +0000 UTC" firstStartedPulling="2025-10-06 13:54:31.584721812 +0000 UTC m=+960.082040349" lastFinishedPulling="2025-10-06 13:54:43.335968122 +0000 UTC m=+971.833286659" observedRunningTime="2025-10-06 13:54:45.797713935 +0000 UTC m=+974.295032482" watchObservedRunningTime="2025-10-06 13:54:45.802081622 +0000 UTC m=+974.299400159" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.821598 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" podStartSLOduration=4.7203818 podStartE2EDuration="15.821581681s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.10663297 +0000 UTC m=+960.603951517" lastFinishedPulling="2025-10-06 13:54:43.207832861 +0000 UTC m=+971.705151398" observedRunningTime="2025-10-06 13:54:45.81516241 +0000 UTC m=+974.312480967" watchObservedRunningTime="2025-10-06 13:54:45.821581681 +0000 UTC 
m=+974.318900218" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.834682 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" podStartSLOduration=4.175786904 podStartE2EDuration="15.83466438s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:31.676416054 +0000 UTC m=+960.173734591" lastFinishedPulling="2025-10-06 13:54:43.33529353 +0000 UTC m=+971.832612067" observedRunningTime="2025-10-06 13:54:45.830090237 +0000 UTC m=+974.327408784" watchObservedRunningTime="2025-10-06 13:54:45.83466438 +0000 UTC m=+974.331982917" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.842845 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" podStartSLOduration=4.813065503 podStartE2EDuration="15.842826005s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.176605233 +0000 UTC m=+960.673923770" lastFinishedPulling="2025-10-06 13:54:43.206365735 +0000 UTC m=+971.703684272" observedRunningTime="2025-10-06 13:54:45.84203071 +0000 UTC m=+974.339349257" watchObservedRunningTime="2025-10-06 13:54:45.842826005 +0000 UTC m=+974.340144552" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.861904 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" podStartSLOduration=4.69966213 podStartE2EDuration="15.861888451s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.044069622 +0000 UTC m=+960.541388159" lastFinishedPulling="2025-10-06 13:54:43.206295933 +0000 UTC m=+971.703614480" observedRunningTime="2025-10-06 13:54:45.859015591 +0000 UTC m=+974.356334128" watchObservedRunningTime="2025-10-06 13:54:45.861888451 +0000 UTC m=+974.359206988" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.889039 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" podStartSLOduration=4.645206138 podStartE2EDuration="15.88901965s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.024364726 +0000 UTC m=+960.521683263" lastFinishedPulling="2025-10-06 13:54:43.268178238 +0000 UTC m=+971.765496775" observedRunningTime="2025-10-06 13:54:45.887583155 +0000 UTC m=+974.384901692" watchObservedRunningTime="2025-10-06 13:54:45.88901965 +0000 UTC m=+974.386338187" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.921785 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" podStartSLOduration=4.697451192 podStartE2EDuration="15.921754233s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:31.983123487 +0000 UTC m=+960.480442034" lastFinishedPulling="2025-10-06 13:54:43.207426538 +0000 UTC m=+971.704745075" observedRunningTime="2025-10-06 13:54:45.912864165 +0000 UTC m=+974.410182702" watchObservedRunningTime="2025-10-06 13:54:45.921754233 +0000 UTC m=+974.419072770" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.939651 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" podStartSLOduration=5.30314069 
podStartE2EDuration="16.939626393s" podCreationTimestamp="2025-10-06 13:54:29 +0000 UTC" firstStartedPulling="2025-10-06 13:54:31.570982286 +0000 UTC m=+960.068300833" lastFinishedPulling="2025-10-06 13:54:43.207467999 +0000 UTC m=+971.704786536" observedRunningTime="2025-10-06 13:54:45.935681359 +0000 UTC m=+974.432999906" watchObservedRunningTime="2025-10-06 13:54:45.939626393 +0000 UTC m=+974.436944930" Oct 06 13:54:45 crc kubenswrapper[4757]: I1006 13:54:45.959272 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" podStartSLOduration=4.341156715 podStartE2EDuration="15.959245696s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:31.589149522 +0000 UTC m=+960.086468059" lastFinishedPulling="2025-10-06 13:54:43.207238503 +0000 UTC m=+971.704557040" observedRunningTime="2025-10-06 13:54:45.955776167 +0000 UTC m=+974.453094694" watchObservedRunningTime="2025-10-06 13:54:45.959245696 +0000 UTC m=+974.456564233" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.299900 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-64f56ff694-bwzkj" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.321466 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-84bd8f6848-cm2d5" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.331841 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-58d86cd59d-8dq6w" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.366898 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-fd648f65-rbrjz" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.441929 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-7ccfc8cf49-np2qg" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.494246 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5b477879bc-7z4cc" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.558254 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-d6c9dc5bc-tsk8p" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.580382 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-69f59f9d8-7qc5n" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.589584 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5b84cc7657-2lh74" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.671349 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5467f8988c-w9lhz" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.707983 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7cb48dbc-k7npw" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.790578 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack-operators/neutron-operator-controller-manager-69b956fbf6-49zhk" Oct 06 13:54:50 crc kubenswrapper[4757]: I1006 13:54:50.897344 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-54d485fd9-kqt4h" Oct 06 13:54:52 crc kubenswrapper[4757]: I1006 13:54:52.418026 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84784cd75d696n7" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.790313 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" event={"ID":"045efd67-1f80-49d4-be81-46310d11b717","Type":"ContainerStarted","Data":"0d1817ffbfe9e94b1b25913a81bfae14fd55f00cdf92a91776c44d008d0bae64"} Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.791482 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.792439 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" event={"ID":"37d8e975-46d7-46cd-8d20-4229c7ffaada","Type":"ContainerStarted","Data":"a997033a220780190c0c35d359d4534279c40385f4bfdcc9963dc0c3fccb28dd"} Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.792822 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.794825 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" event={"ID":"a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c","Type":"ContainerStarted","Data":"2b51216043d00080c375dd2052889fe903bdc2db8bae86f49dcae6dd0d56507b"} Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.798578 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" event={"ID":"8c111ee9-f736-4f86-98b6-590b9adfa58f","Type":"ContainerStarted","Data":"b5980f67417684cea9ef08277f23f0ffc3484e45200794274d983d5496dea4a1"} Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.798857 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.800562 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" event={"ID":"52d15369-93d1-4240-9c8a-22fc91e31b2b","Type":"ContainerStarted","Data":"b0cbf93ac5a88430f2794a4b25ca2345550692002cafce48a3fe065506d6228b"} Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.801044 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.803498 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" event={"ID":"f328cb34-f1aa-49c7-8b79-c465fa4cd522","Type":"ContainerStarted","Data":"cad7ceb178157591e77e055cf541d9bbf3755bfe23261b6fc0f3f4729f1b3144"} Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.804088 4757 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.807536 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" event={"ID":"fa3aa8e8-3594-4e41-afa7-d59a965cde23","Type":"ContainerStarted","Data":"622e46b6e610dad653394b74f84e92ffabe33ef34320607e8c2e7bc393b0959e"} Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.808869 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.812281 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" podStartSLOduration=3.388524067 podStartE2EDuration="25.812261976s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.118577709 +0000 UTC m=+960.615896246" lastFinishedPulling="2025-10-06 13:54:54.542315618 +0000 UTC m=+983.039634155" observedRunningTime="2025-10-06 13:54:55.810703108 +0000 UTC m=+984.308021665" watchObservedRunningTime="2025-10-06 13:54:55.812261976 +0000 UTC m=+984.309580533" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.812977 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" event={"ID":"ec5d9f24-e11c-4966-b85d-cfb960db9568","Type":"ContainerStarted","Data":"fbd86c3c77ebd1645021eaf501d1304447d20b3339ffb4873641e8f5ba36a1b8"} Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.813325 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.844087 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" podStartSLOduration=3.5029651040000003 podStartE2EDuration="25.84405962s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.17748154 +0000 UTC m=+960.674800067" lastFinishedPulling="2025-10-06 13:54:54.518576056 +0000 UTC m=+983.015894583" observedRunningTime="2025-10-06 13:54:55.828932987 +0000 UTC m=+984.326251554" watchObservedRunningTime="2025-10-06 13:54:55.84405962 +0000 UTC m=+984.341378177" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.860028 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-c7h67" podStartSLOduration=3.493885061 podStartE2EDuration="25.860005409s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.176913082 +0000 UTC m=+960.674231619" lastFinishedPulling="2025-10-06 13:54:54.54303343 +0000 UTC m=+983.040351967" observedRunningTime="2025-10-06 13:54:55.859815843 +0000 UTC m=+984.357134430" watchObservedRunningTime="2025-10-06 13:54:55.860005409 +0000 UTC m=+984.357323956" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.883717 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" podStartSLOduration=3.488674913 podStartE2EDuration="25.88369358s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" 
firstStartedPulling="2025-10-06 13:54:32.107844258 +0000 UTC m=+960.605162795" lastFinishedPulling="2025-10-06 13:54:54.502862925 +0000 UTC m=+983.000181462" observedRunningTime="2025-10-06 13:54:55.880378876 +0000 UTC m=+984.377697433" watchObservedRunningTime="2025-10-06 13:54:55.88369358 +0000 UTC m=+984.381012127" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.903966 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" podStartSLOduration=3.452887973 podStartE2EDuration="25.903947612s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.107827477 +0000 UTC m=+960.605146014" lastFinishedPulling="2025-10-06 13:54:54.558887116 +0000 UTC m=+983.056205653" observedRunningTime="2025-10-06 13:54:55.902504108 +0000 UTC m=+984.399822655" watchObservedRunningTime="2025-10-06 13:54:55.903947612 +0000 UTC m=+984.401266149" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.921702 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" podStartSLOduration=3.596031023 podStartE2EDuration="25.921679997s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.177144109 +0000 UTC m=+960.674462646" lastFinishedPulling="2025-10-06 13:54:54.502793043 +0000 UTC m=+983.000111620" observedRunningTime="2025-10-06 13:54:55.917253259 +0000 UTC m=+984.414571806" watchObservedRunningTime="2025-10-06 13:54:55.921679997 +0000 UTC m=+984.418998534" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.936855 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" podStartSLOduration=3.571880348 podStartE2EDuration="25.936836031s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.177475419 +0000 UTC m=+960.674793956" lastFinishedPulling="2025-10-06 13:54:54.542431102 +0000 UTC m=+983.039749639" observedRunningTime="2025-10-06 13:54:55.932036991 +0000 UTC m=+984.429355528" watchObservedRunningTime="2025-10-06 13:54:55.936836031 +0000 UTC m=+984.434154568" Oct 06 13:54:55 crc kubenswrapper[4757]: I1006 13:54:55.949061 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" podStartSLOduration=3.609056081 podStartE2EDuration="25.949042793s" podCreationTimestamp="2025-10-06 13:54:30 +0000 UTC" firstStartedPulling="2025-10-06 13:54:32.177447069 +0000 UTC m=+960.674765606" lastFinishedPulling="2025-10-06 13:54:54.517433781 +0000 UTC m=+983.014752318" observedRunningTime="2025-10-06 13:54:55.946639417 +0000 UTC m=+984.443957964" watchObservedRunningTime="2025-10-06 13:54:55.949042793 +0000 UTC m=+984.446361330" Oct 06 13:55:00 crc kubenswrapper[4757]: I1006 13:55:00.548533 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-6c9b57c67-qqbjc" Oct 06 13:55:00 crc kubenswrapper[4757]: I1006 13:55:00.900528 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-f589c7597-cnfmr" Oct 06 13:55:00 crc kubenswrapper[4757]: I1006 13:55:00.946724 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/test-operator-controller-manager-6bb6dcddc-g9hjb" Oct 06 13:55:00 crc kubenswrapper[4757]: I1006 13:55:00.952944 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-66f6d6849b-pwcvf" Oct 06 13:55:01 crc kubenswrapper[4757]: I1006 13:55:01.000251 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-76d5577b-szvb9" Oct 06 13:55:01 crc kubenswrapper[4757]: I1006 13:55:01.113502 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-84788b6bc5-ghbgp" Oct 06 13:55:01 crc kubenswrapper[4757]: I1006 13:55:01.279555 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-5d98cc5575-shmfd" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.378511 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-664d9cb979-5k8n7"] Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.380695 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.383431 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.385827 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.387325 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-tjdcd" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.389892 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-664d9cb979-5k8n7"] Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.404952 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.449687 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5999f47797-q8xzg"] Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.451617 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.458002 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.472299 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5999f47797-q8xzg"] Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.539608 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-config\") pod \"dnsmasq-dns-664d9cb979-5k8n7\" (UID: \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\") " pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.539783 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc6hx\" (UniqueName: \"kubernetes.io/projected/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-kube-api-access-cc6hx\") pod \"dnsmasq-dns-664d9cb979-5k8n7\" (UID: \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\") " pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.641416 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-dns-svc\") pod \"dnsmasq-dns-5999f47797-q8xzg\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.641509 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc6hx\" (UniqueName: \"kubernetes.io/projected/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-kube-api-access-cc6hx\") pod \"dnsmasq-dns-664d9cb979-5k8n7\" (UID: \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\") " pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.641535 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-config\") pod \"dnsmasq-dns-664d9cb979-5k8n7\" (UID: \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\") " pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.641577 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f547c\" (UniqueName: \"kubernetes.io/projected/743e2322-0e83-419c-8866-2fc223be0821-kube-api-access-f547c\") pod \"dnsmasq-dns-5999f47797-q8xzg\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.641666 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-config\") pod \"dnsmasq-dns-5999f47797-q8xzg\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.642911 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-config\") pod \"dnsmasq-dns-664d9cb979-5k8n7\" (UID: \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\") " pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" 
Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.676343 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc6hx\" (UniqueName: \"kubernetes.io/projected/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-kube-api-access-cc6hx\") pod \"dnsmasq-dns-664d9cb979-5k8n7\" (UID: \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\") " pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.711160 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.747758 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-config\") pod \"dnsmasq-dns-5999f47797-q8xzg\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.745776 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-config\") pod \"dnsmasq-dns-5999f47797-q8xzg\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.747956 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-dns-svc\") pod \"dnsmasq-dns-5999f47797-q8xzg\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.748753 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-dns-svc\") pod \"dnsmasq-dns-5999f47797-q8xzg\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.748960 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f547c\" (UniqueName: \"kubernetes.io/projected/743e2322-0e83-419c-8866-2fc223be0821-kube-api-access-f547c\") pod \"dnsmasq-dns-5999f47797-q8xzg\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.772404 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f547c\" (UniqueName: \"kubernetes.io/projected/743e2322-0e83-419c-8866-2fc223be0821-kube-api-access-f547c\") pod \"dnsmasq-dns-5999f47797-q8xzg\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:17 crc kubenswrapper[4757]: I1006 13:55:17.775161 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:18 crc kubenswrapper[4757]: I1006 13:55:18.172434 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-664d9cb979-5k8n7"] Oct 06 13:55:18 crc kubenswrapper[4757]: W1006 13:55:18.175997 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd52b680_148d_4843_b2f7_8cba6eb2d0cb.slice/crio-36a1b45588e112b2d029d10f6debff0fdccbad81aeb651b6c31b66d0f54c9aca WatchSource:0}: Error finding container 36a1b45588e112b2d029d10f6debff0fdccbad81aeb651b6c31b66d0f54c9aca: Status 404 returned error can't find the container with id 36a1b45588e112b2d029d10f6debff0fdccbad81aeb651b6c31b66d0f54c9aca Oct 06 13:55:18 crc kubenswrapper[4757]: I1006 13:55:18.178125 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 13:55:18 crc kubenswrapper[4757]: I1006 13:55:18.250460 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5999f47797-q8xzg"] Oct 06 13:55:18 crc kubenswrapper[4757]: W1006 13:55:18.255806 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod743e2322_0e83_419c_8866_2fc223be0821.slice/crio-acdc8c22b03a823079bbf9f6c9c793e9f3101c2527ef13fe99a514be99565f6f WatchSource:0}: Error finding container acdc8c22b03a823079bbf9f6c9c793e9f3101c2527ef13fe99a514be99565f6f: Status 404 returned error can't find the container with id acdc8c22b03a823079bbf9f6c9c793e9f3101c2527ef13fe99a514be99565f6f Oct 06 13:55:19 crc kubenswrapper[4757]: I1006 13:55:19.011165 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5999f47797-q8xzg" event={"ID":"743e2322-0e83-419c-8866-2fc223be0821","Type":"ContainerStarted","Data":"acdc8c22b03a823079bbf9f6c9c793e9f3101c2527ef13fe99a514be99565f6f"} Oct 06 13:55:19 crc kubenswrapper[4757]: I1006 13:55:19.013270 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" event={"ID":"cd52b680-148d-4843-b2f7-8cba6eb2d0cb","Type":"ContainerStarted","Data":"36a1b45588e112b2d029d10f6debff0fdccbad81aeb651b6c31b66d0f54c9aca"} Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.150692 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5999f47797-q8xzg"] Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.175041 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-549d5b8c5c-xm87j"] Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.176231 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.195568 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-549d5b8c5c-xm87j"] Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.289378 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-config\") pod \"dnsmasq-dns-549d5b8c5c-xm87j\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.289484 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f84r2\" (UniqueName: \"kubernetes.io/projected/ee7e3a27-b811-4424-9619-edb35acb3527-kube-api-access-f84r2\") pod \"dnsmasq-dns-549d5b8c5c-xm87j\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.289537 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-dns-svc\") pod \"dnsmasq-dns-549d5b8c5c-xm87j\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.391906 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f84r2\" (UniqueName: \"kubernetes.io/projected/ee7e3a27-b811-4424-9619-edb35acb3527-kube-api-access-f84r2\") pod \"dnsmasq-dns-549d5b8c5c-xm87j\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.392030 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-dns-svc\") pod \"dnsmasq-dns-549d5b8c5c-xm87j\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.392094 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-config\") pod \"dnsmasq-dns-549d5b8c5c-xm87j\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.397717 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-config\") pod \"dnsmasq-dns-549d5b8c5c-xm87j\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.398448 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-dns-svc\") pod \"dnsmasq-dns-549d5b8c5c-xm87j\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.414529 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f84r2\" (UniqueName: 
\"kubernetes.io/projected/ee7e3a27-b811-4424-9619-edb35acb3527-kube-api-access-f84r2\") pod \"dnsmasq-dns-549d5b8c5c-xm87j\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.427122 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-664d9cb979-5k8n7"] Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.458804 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74b957b89f-7vw7l"] Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.460556 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.474798 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74b957b89f-7vw7l"] Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.561522 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.594620 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-config\") pod \"dnsmasq-dns-74b957b89f-7vw7l\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.594686 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-dns-svc\") pod \"dnsmasq-dns-74b957b89f-7vw7l\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.594747 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjx2z\" (UniqueName: \"kubernetes.io/projected/1df6a3ef-a968-4f91-a58d-4fa75a44130a-kube-api-access-sjx2z\") pod \"dnsmasq-dns-74b957b89f-7vw7l\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.696374 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjx2z\" (UniqueName: \"kubernetes.io/projected/1df6a3ef-a968-4f91-a58d-4fa75a44130a-kube-api-access-sjx2z\") pod \"dnsmasq-dns-74b957b89f-7vw7l\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.696822 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-config\") pod \"dnsmasq-dns-74b957b89f-7vw7l\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.696856 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-dns-svc\") pod \"dnsmasq-dns-74b957b89f-7vw7l\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.697836 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-dns-svc\") pod \"dnsmasq-dns-74b957b89f-7vw7l\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.698216 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-config\") pod \"dnsmasq-dns-74b957b89f-7vw7l\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.723816 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjx2z\" (UniqueName: \"kubernetes.io/projected/1df6a3ef-a968-4f91-a58d-4fa75a44130a-kube-api-access-sjx2z\") pod \"dnsmasq-dns-74b957b89f-7vw7l\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:20 crc kubenswrapper[4757]: I1006 13:55:20.793842 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.066361 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-549d5b8c5c-xm87j"] Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.243691 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74b957b89f-7vw7l"] Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.327217 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.331724 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.334463 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.334556 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.334664 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.334676 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.334736 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.334806 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.335776 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-s5pq9" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.359138 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.512652 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.512706 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.512731 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.512918 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk52f\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-kube-api-access-fk52f\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.513025 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.513056 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.513081 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.513132 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.513179 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.513229 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.513268 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.575970 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.579259 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.581403 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.581694 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.581809 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.582049 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.582388 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-99plw" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.582761 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.583401 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.592615 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614056 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614109 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614165 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614198 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614220 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614251 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk52f\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-kube-api-access-fk52f\") 
pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614275 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614291 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614305 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614324 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614347 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614537 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.614932 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.615228 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.615569 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.615860 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.616893 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.621939 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.622271 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.625150 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.627588 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.634637 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.636605 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk52f\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-kube-api-access-fk52f\") pod \"rabbitmq-server-0\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") " pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.665714 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717324 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717378 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717404 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717426 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717457 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717487 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717513 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717547 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717566 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717588 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.717612 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ttqm\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-kube-api-access-7ttqm\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.819382 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ttqm\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-kube-api-access-7ttqm\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.819449 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.819478 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.819508 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.819538 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.820013 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.820018 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " 
pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.820058 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.820091 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.820136 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.820168 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.820187 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.820210 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.820453 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.821074 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.822137 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.823141 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" 
(UniqueName: \"kubernetes.io/downward-api/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.823682 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.833022 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.833368 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.834673 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.836531 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ttqm\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-kube-api-access-7ttqm\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.840431 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:21 crc kubenswrapper[4757]: I1006 13:55:21.921722 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:55:22 crc kubenswrapper[4757]: I1006 13:55:22.986067 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 06 13:55:22 crc kubenswrapper[4757]: I1006 13:55:22.997674 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.005573 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.005594 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.006659 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.008747 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.009058 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-7gzsb" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.017114 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.018010 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.141856 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-default\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.141936 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-secrets\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.141982 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9plq\" (UniqueName: \"kubernetes.io/projected/1489eff7-41ff-420a-bce0-14247f8554ee-kube-api-access-k9plq\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.142152 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-operator-scripts\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.142189 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-kolla-config\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.142217 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-generated\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " 
pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.142262 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.142293 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.142349 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.244317 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-default\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.244384 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-secrets\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.244423 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9plq\" (UniqueName: \"kubernetes.io/projected/1489eff7-41ff-420a-bce0-14247f8554ee-kube-api-access-k9plq\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.244489 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-operator-scripts\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.244527 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-kolla-config\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.244553 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-generated\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.244599 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.244630 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.244675 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.245400 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.245832 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-default\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.245958 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-kolla-config\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.245990 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-generated\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.247165 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-operator-scripts\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.249969 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-secrets\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.250711 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: 
I1006 13:55:23.262415 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.275145 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9plq\" (UniqueName: \"kubernetes.io/projected/1489eff7-41ff-420a-bce0-14247f8554ee-kube-api-access-k9plq\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.276330 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " pod="openstack/openstack-galera-0" Oct 06 13:55:23 crc kubenswrapper[4757]: I1006 13:55:23.331183 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.059605 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.061900 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.065404 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.065648 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.065945 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-xdkdh" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.066122 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.071623 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.158042 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.158182 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.158421 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: 
\"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.158463 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.158500 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.158699 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2w4w\" (UniqueName: \"kubernetes.io/projected/39ef4372-3b20-44b5-b441-85f963e6a25a-kube-api-access-j2w4w\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.158742 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.158895 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.158925 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.259932 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.260336 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.260381 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: 
\"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.260412 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.260442 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.260500 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2w4w\" (UniqueName: \"kubernetes.io/projected/39ef4372-3b20-44b5-b441-85f963e6a25a-kube-api-access-j2w4w\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.260533 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.260789 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.261154 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.261190 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.261563 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.261675 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc 
kubenswrapper[4757]: I1006 13:55:24.261839 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.262527 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.265443 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.268027 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.272228 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.287598 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.299721 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2w4w\" (UniqueName: \"kubernetes.io/projected/39ef4372-3b20-44b5-b441-85f963e6a25a-kube-api-access-j2w4w\") pod \"openstack-cell1-galera-0\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.389089 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.400912 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.401958 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.403566 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-b552c" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.403815 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.405708 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.414845 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.567949 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kolla-config\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.568017 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmd5w\" (UniqueName: \"kubernetes.io/projected/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kube-api-access-nmd5w\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.568058 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-config-data\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.568079 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.568122 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.668887 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-config-data\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.668938 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.668957 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.669026 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kolla-config\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.669066 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmd5w\" (UniqueName: \"kubernetes.io/projected/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kube-api-access-nmd5w\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.669943 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-config-data\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.670207 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kolla-config\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.673227 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.681758 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.689747 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmd5w\" (UniqueName: \"kubernetes.io/projected/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kube-api-access-nmd5w\") pod \"memcached-0\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " pod="openstack/memcached-0" Oct 06 13:55:24 crc kubenswrapper[4757]: I1006 13:55:24.722190 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 06 13:55:26 crc kubenswrapper[4757]: I1006 13:55:26.150461 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 13:55:26 crc kubenswrapper[4757]: I1006 13:55:26.151770 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 06 13:55:26 crc kubenswrapper[4757]: I1006 13:55:26.155456 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-szzml" Oct 06 13:55:26 crc kubenswrapper[4757]: I1006 13:55:26.166453 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 13:55:26 crc kubenswrapper[4757]: I1006 13:55:26.292841 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t67mk\" (UniqueName: \"kubernetes.io/projected/7babda66-5edc-4775-b36d-e22b39689c1c-kube-api-access-t67mk\") pod \"kube-state-metrics-0\" (UID: \"7babda66-5edc-4775-b36d-e22b39689c1c\") " pod="openstack/kube-state-metrics-0" Oct 06 13:55:26 crc kubenswrapper[4757]: I1006 13:55:26.394588 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t67mk\" (UniqueName: \"kubernetes.io/projected/7babda66-5edc-4775-b36d-e22b39689c1c-kube-api-access-t67mk\") pod \"kube-state-metrics-0\" (UID: \"7babda66-5edc-4775-b36d-e22b39689c1c\") " pod="openstack/kube-state-metrics-0" Oct 06 13:55:26 crc kubenswrapper[4757]: I1006 13:55:26.423655 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t67mk\" (UniqueName: \"kubernetes.io/projected/7babda66-5edc-4775-b36d-e22b39689c1c-kube-api-access-t67mk\") pod \"kube-state-metrics-0\" (UID: \"7babda66-5edc-4775-b36d-e22b39689c1c\") " pod="openstack/kube-state-metrics-0" Oct 06 13:55:26 crc kubenswrapper[4757]: I1006 13:55:26.478286 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 06 13:55:27 crc kubenswrapper[4757]: W1006 13:55:27.059016 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1df6a3ef_a968_4f91_a58d_4fa75a44130a.slice/crio-09d6532564c2ff39634ab2937bbc90583b737a3d4ee0f0e10b2fe417184b6a7a WatchSource:0}: Error finding container 09d6532564c2ff39634ab2937bbc90583b737a3d4ee0f0e10b2fe417184b6a7a: Status 404 returned error can't find the container with id 09d6532564c2ff39634ab2937bbc90583b737a3d4ee0f0e10b2fe417184b6a7a Oct 06 13:55:27 crc kubenswrapper[4757]: W1006 13:55:27.061010 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee7e3a27_b811_4424_9619_edb35acb3527.slice/crio-03d63b77e73c3f5fa9e1a52cfd1e38efe3cf0852ed52acdbaa3dec5890647d73 WatchSource:0}: Error finding container 03d63b77e73c3f5fa9e1a52cfd1e38efe3cf0852ed52acdbaa3dec5890647d73: Status 404 returned error can't find the container with id 03d63b77e73c3f5fa9e1a52cfd1e38efe3cf0852ed52acdbaa3dec5890647d73 Oct 06 13:55:27 crc kubenswrapper[4757]: I1006 13:55:27.130030 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" event={"ID":"1df6a3ef-a968-4f91-a58d-4fa75a44130a","Type":"ContainerStarted","Data":"09d6532564c2ff39634ab2937bbc90583b737a3d4ee0f0e10b2fe417184b6a7a"} Oct 06 13:55:27 crc kubenswrapper[4757]: I1006 13:55:27.131188 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" event={"ID":"ee7e3a27-b811-4424-9619-edb35acb3527","Type":"ContainerStarted","Data":"03d63b77e73c3f5fa9e1a52cfd1e38efe3cf0852ed52acdbaa3dec5890647d73"} Oct 06 13:55:30 crc kubenswrapper[4757]: I1006 13:55:30.963119 4757 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 06 13:55:30 crc kubenswrapper[4757]: I1006 13:55:30.964949 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:30 crc kubenswrapper[4757]: I1006 13:55:30.968236 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 06 13:55:30 crc kubenswrapper[4757]: I1006 13:55:30.968713 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 06 13:55:30 crc kubenswrapper[4757]: I1006 13:55:30.968859 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 06 13:55:30 crc kubenswrapper[4757]: I1006 13:55:30.969073 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 06 13:55:30 crc kubenswrapper[4757]: I1006 13:55:30.969227 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-9dmgl" Oct 06 13:55:30 crc kubenswrapper[4757]: I1006 13:55:30.973962 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.063162 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.063213 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.063242 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.063273 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-config\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.063296 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.063312 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kk2g\" (UniqueName: \"kubernetes.io/projected/47e3132d-b23b-47f2-b26e-5511df70deec-kube-api-access-8kk2g\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc 
kubenswrapper[4757]: I1006 13:55:31.063327 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.063370 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.164511 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.164600 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kk2g\" (UniqueName: \"kubernetes.io/projected/47e3132d-b23b-47f2-b26e-5511df70deec-kube-api-access-8kk2g\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.164646 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.164780 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.164934 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.164987 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.165059 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.165179 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-config\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.167567 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-config\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.167633 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.167668 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.168979 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.177625 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.179939 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.180750 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.185052 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kk2g\" (UniqueName: \"kubernetes.io/projected/47e3132d-b23b-47f2-b26e-5511df70deec-kube-api-access-8kk2g\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.217162 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-kn7bx"] Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.217321 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") " 
pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.219190 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.220971 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vvdvx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.221292 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.235014 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.240620 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kn7bx"] Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.246921 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-fqwwx"] Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.251673 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.257774 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-fqwwx"] Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.267645 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-ovn-controller-tls-certs\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.267690 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-log-ovn\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.267720 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-combined-ca-bundle\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.267776 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run-ovn\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.267797 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bd5p\" (UniqueName: \"kubernetes.io/projected/0efda247-fa18-49db-a37d-1dd28d999ed7-kube-api-access-4bd5p\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.267846 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" 
(UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.267897 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0efda247-fa18-49db-a37d-1dd28d999ed7-scripts\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.299576 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369509 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-log\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369572 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-ovn-controller-tls-certs\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369605 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-combined-ca-bundle\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369655 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3482c1c4-b15b-46cb-a897-3528fa22adda-scripts\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369702 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-run\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369728 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-etc-ovs\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369753 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0efda247-fa18-49db-a37d-1dd28d999ed7-scripts\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369785 4757 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-lib\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369805 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-log-ovn\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369842 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run-ovn\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369864 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bd5p\" (UniqueName: \"kubernetes.io/projected/0efda247-fa18-49db-a37d-1dd28d999ed7-kube-api-access-4bd5p\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369908 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.369933 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsx5h\" (UniqueName: \"kubernetes.io/projected/3482c1c4-b15b-46cb-a897-3528fa22adda-kube-api-access-wsx5h\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.370673 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-log-ovn\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.370862 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.371026 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run-ovn\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.372113 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0efda247-fa18-49db-a37d-1dd28d999ed7-scripts\") pod \"ovn-controller-kn7bx\" (UID: 
\"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.373405 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-ovn-controller-tls-certs\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.384746 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-combined-ca-bundle\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.385527 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bd5p\" (UniqueName: \"kubernetes.io/projected/0efda247-fa18-49db-a37d-1dd28d999ed7-kube-api-access-4bd5p\") pod \"ovn-controller-kn7bx\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") " pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.471510 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-lib\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.471711 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsx5h\" (UniqueName: \"kubernetes.io/projected/3482c1c4-b15b-46cb-a897-3528fa22adda-kube-api-access-wsx5h\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.471842 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-log\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.471996 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-lib\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.472136 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-log\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.472271 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3482c1c4-b15b-46cb-a897-3528fa22adda-scripts\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.472339 4757 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-run\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.472361 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-etc-ovs\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.472562 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-etc-ovs\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.472952 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-run\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.475243 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3482c1c4-b15b-46cb-a897-3528fa22adda-scripts\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.495034 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsx5h\" (UniqueName: \"kubernetes.io/projected/3482c1c4-b15b-46cb-a897-3528fa22adda-kube-api-access-wsx5h\") pod \"ovn-controller-ovs-fqwwx\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") " pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.579247 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:31 crc kubenswrapper[4757]: I1006 13:55:31.589815 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:32 crc kubenswrapper[4757]: I1006 13:55:32.140261 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 13:55:32 crc kubenswrapper[4757]: E1006 13:55:32.606055 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ce327de274aa982f04e61d922c6ca114f1b5ba21ffb78b96ff2bf5b5cd9cfe8d" Oct 06 13:55:32 crc kubenswrapper[4757]: E1006 13:55:32.606546 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ce327de274aa982f04e61d922c6ca114f1b5ba21ffb78b96ff2bf5b5cd9cfe8d,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f547c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5999f47797-q8xzg_openstack(743e2322-0e83-419c-8866-2fc223be0821): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 06 13:55:32 crc kubenswrapper[4757]: E1006 13:55:32.611279 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5999f47797-q8xzg" podUID="743e2322-0e83-419c-8866-2fc223be0821" Oct 06 13:55:32 crc kubenswrapper[4757]: E1006 13:55:32.648750 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: 
context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ce327de274aa982f04e61d922c6ca114f1b5ba21ffb78b96ff2bf5b5cd9cfe8d" Oct 06 13:55:32 crc kubenswrapper[4757]: E1006 13:55:32.649317 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ce327de274aa982f04e61d922c6ca114f1b5ba21ffb78b96ff2bf5b5cd9cfe8d,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cc6hx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-664d9cb979-5k8n7_openstack(cd52b680-148d-4843-b2f7-8cba6eb2d0cb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 06 13:55:32 crc kubenswrapper[4757]: E1006 13:55:32.650450 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" podUID="cd52b680-148d-4843-b2f7-8cba6eb2d0cb" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.117164 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.189806 4757 generic.go:334] "Generic (PLEG): container finished" podID="1df6a3ef-a968-4f91-a58d-4fa75a44130a" containerID="d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820" exitCode=0 Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.189865 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" event={"ID":"1df6a3ef-a968-4f91-a58d-4fa75a44130a","Type":"ContainerDied","Data":"d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820"} Oct 06 
13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.194785 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61","Type":"ContainerStarted","Data":"75250fb51d6344b2c68698bb3fb1999e519b95bf1c6e8dbeed48644d8ef8b752"} Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.196113 4757 generic.go:334] "Generic (PLEG): container finished" podID="ee7e3a27-b811-4424-9619-edb35acb3527" containerID="5df84abab1caae822870fe7a316c35bbbec727b85562976ead44f3e175906eda" exitCode=0 Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.197073 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" event={"ID":"ee7e3a27-b811-4424-9619-edb35acb3527","Type":"ContainerDied","Data":"5df84abab1caae822870fe7a316c35bbbec727b85562976ead44f3e175906eda"} Oct 06 13:55:33 crc kubenswrapper[4757]: W1006 13:55:33.233291 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7babda66_5edc_4775_b36d_e22b39689c1c.slice/crio-24bd1cc377a26b91402fcdbb5b0be65de1eec0830d67411762890802bbdb509c WatchSource:0}: Error finding container 24bd1cc377a26b91402fcdbb5b0be65de1eec0830d67411762890802bbdb509c: Status 404 returned error can't find the container with id 24bd1cc377a26b91402fcdbb5b0be65de1eec0830d67411762890802bbdb509c Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.238169 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.250699 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.374359 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kn7bx"] Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.382653 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.402671 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.492699 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 06 13:55:33 crc kubenswrapper[4757]: W1006 13:55:33.499200 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47e3132d_b23b_47f2_b26e_5511df70deec.slice/crio-6e0a3c3b86d81999a73e81e0a4b29a852d1c9a0311760b3a102b1cb0c1809da7 WatchSource:0}: Error finding container 6e0a3c3b86d81999a73e81e0a4b29a852d1c9a0311760b3a102b1cb0c1809da7: Status 404 returned error can't find the container with id 6e0a3c3b86d81999a73e81e0a4b29a852d1c9a0311760b3a102b1cb0c1809da7 Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.554788 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.622027 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.636214 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-config\") pod \"743e2322-0e83-419c-8866-2fc223be0821\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.636292 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-dns-svc\") pod \"743e2322-0e83-419c-8866-2fc223be0821\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.637212 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "743e2322-0e83-419c-8866-2fc223be0821" (UID: "743e2322-0e83-419c-8866-2fc223be0821"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.638537 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-config" (OuterVolumeSpecName: "config") pod "743e2322-0e83-419c-8866-2fc223be0821" (UID: "743e2322-0e83-419c-8866-2fc223be0821"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.637033 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f547c\" (UniqueName: \"kubernetes.io/projected/743e2322-0e83-419c-8866-2fc223be0821-kube-api-access-f547c\") pod \"743e2322-0e83-419c-8866-2fc223be0821\" (UID: \"743e2322-0e83-419c-8866-2fc223be0821\") " Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.640233 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.640359 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/743e2322-0e83-419c-8866-2fc223be0821-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.644467 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/743e2322-0e83-419c-8866-2fc223be0821-kube-api-access-f547c" (OuterVolumeSpecName: "kube-api-access-f547c") pod "743e2322-0e83-419c-8866-2fc223be0821" (UID: "743e2322-0e83-419c-8866-2fc223be0821"). InnerVolumeSpecName "kube-api-access-f547c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.741877 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cc6hx\" (UniqueName: \"kubernetes.io/projected/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-kube-api-access-cc6hx\") pod \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\" (UID: \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\") " Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.741994 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-config\") pod \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\" (UID: \"cd52b680-148d-4843-b2f7-8cba6eb2d0cb\") " Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.742368 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f547c\" (UniqueName: \"kubernetes.io/projected/743e2322-0e83-419c-8866-2fc223be0821-kube-api-access-f547c\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.742659 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-config" (OuterVolumeSpecName: "config") pod "cd52b680-148d-4843-b2f7-8cba6eb2d0cb" (UID: "cd52b680-148d-4843-b2f7-8cba6eb2d0cb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.750240 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-kube-api-access-cc6hx" (OuterVolumeSpecName: "kube-api-access-cc6hx") pod "cd52b680-148d-4843-b2f7-8cba6eb2d0cb" (UID: "cd52b680-148d-4843-b2f7-8cba6eb2d0cb"). InnerVolumeSpecName "kube-api-access-cc6hx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.752900 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.756241 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.758519 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.758698 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.758702 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.760748 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-lpfmt" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.767461 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.843843 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.843896 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.843918 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96jg5\" (UniqueName: \"kubernetes.io/projected/780fa947-0a68-4231-b5b3-e0cad80204d2-kube-api-access-96jg5\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.843938 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.843972 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.843992 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-config\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.844025 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: 
\"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.844238 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.844540 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cc6hx\" (UniqueName: \"kubernetes.io/projected/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-kube-api-access-cc6hx\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.844567 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd52b680-148d-4843-b2f7-8cba6eb2d0cb-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.946449 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.946496 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96jg5\" (UniqueName: \"kubernetes.io/projected/780fa947-0a68-4231-b5b3-e0cad80204d2-kube-api-access-96jg5\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.946523 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.946874 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.946910 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.946928 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-config\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.946960 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 
13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.946981 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.947615 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.947634 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.948682 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.950610 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-config\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.951512 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.952346 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.959363 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.961637 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96jg5\" (UniqueName: \"kubernetes.io/projected/780fa947-0a68-4231-b5b3-e0cad80204d2-kube-api-access-96jg5\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:33 crc kubenswrapper[4757]: I1006 13:55:33.970253 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " 
pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.083538 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.229202 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5999f47797-q8xzg" Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.232532 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5999f47797-q8xzg" event={"ID":"743e2322-0e83-419c-8866-2fc223be0821","Type":"ContainerDied","Data":"acdc8c22b03a823079bbf9f6c9c793e9f3101c2527ef13fe99a514be99565f6f"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.235081 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" event={"ID":"1df6a3ef-a968-4f91-a58d-4fa75a44130a","Type":"ContainerStarted","Data":"b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.235408 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.244532 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.245287 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-664d9cb979-5k8n7" event={"ID":"cd52b680-148d-4843-b2f7-8cba6eb2d0cb","Type":"ContainerDied","Data":"36a1b45588e112b2d029d10f6debff0fdccbad81aeb651b6c31b66d0f54c9aca"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.254641 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" podStartSLOduration=8.539336067 podStartE2EDuration="14.254623229s" podCreationTimestamp="2025-10-06 13:55:20 +0000 UTC" firstStartedPulling="2025-10-06 13:55:27.071993377 +0000 UTC m=+1015.569311914" lastFinishedPulling="2025-10-06 13:55:32.787280539 +0000 UTC m=+1021.284599076" observedRunningTime="2025-10-06 13:55:34.251665517 +0000 UTC m=+1022.748984064" watchObservedRunningTime="2025-10-06 13:55:34.254623229 +0000 UTC m=+1022.751941766" Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.257664 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"a21e8c5a-5819-4e8c-9b20-5353625fc36b","Type":"ContainerStarted","Data":"d97690835875d33ed098d272fa083111b478485f4b1e3e9f63ee3cc3077f6a86"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.259938 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"39ef4372-3b20-44b5-b441-85f963e6a25a","Type":"ContainerStarted","Data":"ca577a70da94d941fb8d276731361db62ddf49abe4ce555e0dc37e838eaa242e"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.264159 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"47e3132d-b23b-47f2-b26e-5511df70deec","Type":"ContainerStarted","Data":"6e0a3c3b86d81999a73e81e0a4b29a852d1c9a0311760b3a102b1cb0c1809da7"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.267654 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1489eff7-41ff-420a-bce0-14247f8554ee","Type":"ContainerStarted","Data":"eb11584b190b8ceeb18b079a27a3e4925b216563cf949028e500b4d81a7bb636"} Oct 06 
13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.269993 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" event={"ID":"ee7e3a27-b811-4424-9619-edb35acb3527","Type":"ContainerStarted","Data":"054067921d37a8aee37d6965f7bafe252c9f68fef8f26289c2c2db6e8765c420"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.270379 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.273019 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7","Type":"ContainerStarted","Data":"ac927ab9b18957952cba5b66bcedd6426a66d7dcae41ab75d009ce81b7468fe5"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.274278 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7babda66-5edc-4775-b36d-e22b39689c1c","Type":"ContainerStarted","Data":"24bd1cc377a26b91402fcdbb5b0be65de1eec0830d67411762890802bbdb509c"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.275589 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kn7bx" event={"ID":"0efda247-fa18-49db-a37d-1dd28d999ed7","Type":"ContainerStarted","Data":"143cf145148e00a2089644312e7abc33d0e12ff33663c6a61f3f0d83eace14ab"} Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.296325 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" podStartSLOduration=8.590991913 podStartE2EDuration="14.296307933s" podCreationTimestamp="2025-10-06 13:55:20 +0000 UTC" firstStartedPulling="2025-10-06 13:55:27.071676407 +0000 UTC m=+1015.568994944" lastFinishedPulling="2025-10-06 13:55:32.776992427 +0000 UTC m=+1021.274310964" observedRunningTime="2025-10-06 13:55:34.289989975 +0000 UTC m=+1022.787308512" watchObservedRunningTime="2025-10-06 13:55:34.296307933 +0000 UTC m=+1022.793626470" Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.326135 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5999f47797-q8xzg"] Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.333036 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5999f47797-q8xzg"] Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.350257 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-664d9cb979-5k8n7"] Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.357637 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-664d9cb979-5k8n7"] Oct 06 13:55:34 crc kubenswrapper[4757]: I1006 13:55:34.438423 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-fqwwx"] Oct 06 13:55:34 crc kubenswrapper[4757]: W1006 13:55:34.697636 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3482c1c4_b15b_46cb_a897_3528fa22adda.slice/crio-9c7a96622871ab7dbf47b63299b8d069d77f24b16e932286a45618252e89ae36 WatchSource:0}: Error finding container 9c7a96622871ab7dbf47b63299b8d069d77f24b16e932286a45618252e89ae36: Status 404 returned error can't find the container with id 9c7a96622871ab7dbf47b63299b8d069d77f24b16e932286a45618252e89ae36 Oct 06 13:55:35 crc kubenswrapper[4757]: I1006 13:55:35.283344 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fqwwx" 
event={"ID":"3482c1c4-b15b-46cb-a897-3528fa22adda","Type":"ContainerStarted","Data":"9c7a96622871ab7dbf47b63299b8d069d77f24b16e932286a45618252e89ae36"} Oct 06 13:55:36 crc kubenswrapper[4757]: I1006 13:55:36.191286 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="743e2322-0e83-419c-8866-2fc223be0821" path="/var/lib/kubelet/pods/743e2322-0e83-419c-8866-2fc223be0821/volumes" Oct 06 13:55:36 crc kubenswrapper[4757]: I1006 13:55:36.191913 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd52b680-148d-4843-b2f7-8cba6eb2d0cb" path="/var/lib/kubelet/pods/cd52b680-148d-4843-b2f7-8cba6eb2d0cb/volumes" Oct 06 13:55:37 crc kubenswrapper[4757]: I1006 13:55:37.980202 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-l99t6"] Oct 06 13:55:37 crc kubenswrapper[4757]: I1006 13:55:37.981539 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:37 crc kubenswrapper[4757]: I1006 13:55:37.985361 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 06 13:55:37 crc kubenswrapper[4757]: I1006 13:55:37.989436 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-l99t6"] Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.107634 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-549d5b8c5c-xm87j"] Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.107912 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" podUID="ee7e3a27-b811-4424-9619-edb35acb3527" containerName="dnsmasq-dns" containerID="cri-o://054067921d37a8aee37d6965f7bafe252c9f68fef8f26289c2c2db6e8765c420" gracePeriod=10 Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.115200 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.116136 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77flw\" (UniqueName: \"kubernetes.io/projected/f38ce569-0a5c-408d-9d44-bb953d38e24e-kube-api-access-77flw\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.116290 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovn-rundir\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.116318 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.116412 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovs-rundir\") 
pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.116435 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f38ce569-0a5c-408d-9d44-bb953d38e24e-config\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.116456 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-combined-ca-bundle\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.137442 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65549948b5-v2fm9"] Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.143438 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.150285 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.168949 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65549948b5-v2fm9"] Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218279 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvqhs\" (UniqueName: \"kubernetes.io/projected/26c6409f-16cc-4f63-9993-2939bcf833fc-kube-api-access-nvqhs\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218332 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovn-rundir\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218359 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218467 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovs-rundir\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218513 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f38ce569-0a5c-408d-9d44-bb953d38e24e-config\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " 
pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218539 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-combined-ca-bundle\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218677 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovn-rundir\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218700 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77flw\" (UniqueName: \"kubernetes.io/projected/f38ce569-0a5c-408d-9d44-bb953d38e24e-kube-api-access-77flw\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218795 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-dns-svc\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.218847 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-ovsdbserver-nb\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.219112 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-config\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.219246 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovs-rundir\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.220190 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f38ce569-0a5c-408d-9d44-bb953d38e24e-config\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.234061 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 
13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.234653 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-combined-ca-bundle\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.239739 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77flw\" (UniqueName: \"kubernetes.io/projected/f38ce569-0a5c-408d-9d44-bb953d38e24e-kube-api-access-77flw\") pod \"ovn-controller-metrics-l99t6\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") " pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.306739 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-l99t6" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.315282 4757 generic.go:334] "Generic (PLEG): container finished" podID="ee7e3a27-b811-4424-9619-edb35acb3527" containerID="054067921d37a8aee37d6965f7bafe252c9f68fef8f26289c2c2db6e8765c420" exitCode=0 Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.315331 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" event={"ID":"ee7e3a27-b811-4424-9619-edb35acb3527","Type":"ContainerDied","Data":"054067921d37a8aee37d6965f7bafe252c9f68fef8f26289c2c2db6e8765c420"} Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.320521 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-dns-svc\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.320586 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-ovsdbserver-nb\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.320616 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-config\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.320650 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvqhs\" (UniqueName: \"kubernetes.io/projected/26c6409f-16cc-4f63-9993-2939bcf833fc-kube-api-access-nvqhs\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.321463 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-dns-svc\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.322836 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-ovsdbserver-nb\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.322887 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-config\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.338698 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvqhs\" (UniqueName: \"kubernetes.io/projected/26c6409f-16cc-4f63-9993-2939bcf833fc-kube-api-access-nvqhs\") pod \"dnsmasq-dns-65549948b5-v2fm9\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:38 crc kubenswrapper[4757]: I1006 13:55:38.523873 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:40 crc kubenswrapper[4757]: I1006 13:55:40.562323 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" podUID="ee7e3a27-b811-4424-9619-edb35acb3527" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.101:5353: connect: connection refused" Oct 06 13:55:40 crc kubenswrapper[4757]: I1006 13:55:40.796325 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:55:40 crc kubenswrapper[4757]: I1006 13:55:40.938689 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 06 13:55:41 crc kubenswrapper[4757]: W1006 13:55:41.274683 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod780fa947_0a68_4231_b5b3_e0cad80204d2.slice/crio-b6abf6024cef72c54fb94eb234ce9a63c024a2e35ede681a39f9c72cfba3d310 WatchSource:0}: Error finding container b6abf6024cef72c54fb94eb234ce9a63c024a2e35ede681a39f9c72cfba3d310: Status 404 returned error can't find the container with id b6abf6024cef72c54fb94eb234ce9a63c024a2e35ede681a39f9c72cfba3d310 Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.350873 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" event={"ID":"ee7e3a27-b811-4424-9619-edb35acb3527","Type":"ContainerDied","Data":"03d63b77e73c3f5fa9e1a52cfd1e38efe3cf0852ed52acdbaa3dec5890647d73"} Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.350933 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03d63b77e73c3f5fa9e1a52cfd1e38efe3cf0852ed52acdbaa3dec5890647d73" Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.353258 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"780fa947-0a68-4231-b5b3-e0cad80204d2","Type":"ContainerStarted","Data":"b6abf6024cef72c54fb94eb234ce9a63c024a2e35ede681a39f9c72cfba3d310"} Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.361154 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.471325 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f84r2\" (UniqueName: \"kubernetes.io/projected/ee7e3a27-b811-4424-9619-edb35acb3527-kube-api-access-f84r2\") pod \"ee7e3a27-b811-4424-9619-edb35acb3527\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.471407 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-config\") pod \"ee7e3a27-b811-4424-9619-edb35acb3527\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.471447 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-dns-svc\") pod \"ee7e3a27-b811-4424-9619-edb35acb3527\" (UID: \"ee7e3a27-b811-4424-9619-edb35acb3527\") " Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.476539 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee7e3a27-b811-4424-9619-edb35acb3527-kube-api-access-f84r2" (OuterVolumeSpecName: "kube-api-access-f84r2") pod "ee7e3a27-b811-4424-9619-edb35acb3527" (UID: "ee7e3a27-b811-4424-9619-edb35acb3527"). InnerVolumeSpecName "kube-api-access-f84r2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.509608 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ee7e3a27-b811-4424-9619-edb35acb3527" (UID: "ee7e3a27-b811-4424-9619-edb35acb3527"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.516022 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-config" (OuterVolumeSpecName: "config") pod "ee7e3a27-b811-4424-9619-edb35acb3527" (UID: "ee7e3a27-b811-4424-9619-edb35acb3527"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.573873 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f84r2\" (UniqueName: \"kubernetes.io/projected/ee7e3a27-b811-4424-9619-edb35acb3527-kube-api-access-f84r2\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.574218 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.574232 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ee7e3a27-b811-4424-9619-edb35acb3527-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:41 crc kubenswrapper[4757]: I1006 13:55:41.952736 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-l99t6"] Oct 06 13:55:42 crc kubenswrapper[4757]: I1006 13:55:42.006655 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65549948b5-v2fm9"] Oct 06 13:55:42 crc kubenswrapper[4757]: W1006 13:55:42.177172 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26c6409f_16cc_4f63_9993_2939bcf833fc.slice/crio-750d72ff108c63da74d0e8e344de9f988e476127c2945caeb59eda5c6a423166 WatchSource:0}: Error finding container 750d72ff108c63da74d0e8e344de9f988e476127c2945caeb59eda5c6a423166: Status 404 returned error can't find the container with id 750d72ff108c63da74d0e8e344de9f988e476127c2945caeb59eda5c6a423166 Oct 06 13:55:42 crc kubenswrapper[4757]: W1006 13:55:42.186822 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf38ce569_0a5c_408d_9d44_bb953d38e24e.slice/crio-9d1c4d9bc8e983d7d6b4a5c87332d88666e7997a6545ad48697242920e341637 WatchSource:0}: Error finding container 9d1c4d9bc8e983d7d6b4a5c87332d88666e7997a6545ad48697242920e341637: Status 404 returned error can't find the container with id 9d1c4d9bc8e983d7d6b4a5c87332d88666e7997a6545ad48697242920e341637 Oct 06 13:55:42 crc kubenswrapper[4757]: I1006 13:55:42.401690 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"a21e8c5a-5819-4e8c-9b20-5353625fc36b","Type":"ContainerStarted","Data":"ff662bc600af80633dbf9ed1780d1a31e3db1b29ac07bbca32eb0ee8886ef72f"} Oct 06 13:55:42 crc kubenswrapper[4757]: I1006 13:55:42.402749 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 06 13:55:42 crc kubenswrapper[4757]: I1006 13:55:42.416444 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" event={"ID":"26c6409f-16cc-4f63-9993-2939bcf833fc","Type":"ContainerStarted","Data":"750d72ff108c63da74d0e8e344de9f988e476127c2945caeb59eda5c6a423166"} Oct 06 13:55:42 crc kubenswrapper[4757]: I1006 13:55:42.436640 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-549d5b8c5c-xm87j" Oct 06 13:55:42 crc kubenswrapper[4757]: I1006 13:55:42.437939 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-l99t6" event={"ID":"f38ce569-0a5c-408d-9d44-bb953d38e24e","Type":"ContainerStarted","Data":"9d1c4d9bc8e983d7d6b4a5c87332d88666e7997a6545ad48697242920e341637"} Oct 06 13:55:42 crc kubenswrapper[4757]: I1006 13:55:42.509161 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-549d5b8c5c-xm87j"] Oct 06 13:55:42 crc kubenswrapper[4757]: I1006 13:55:42.516039 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-549d5b8c5c-xm87j"] Oct 06 13:55:42 crc kubenswrapper[4757]: I1006 13:55:42.533838 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=10.519762469 podStartE2EDuration="18.533812817s" podCreationTimestamp="2025-10-06 13:55:24 +0000 UTC" firstStartedPulling="2025-10-06 13:55:33.259150153 +0000 UTC m=+1021.756468680" lastFinishedPulling="2025-10-06 13:55:41.273200491 +0000 UTC m=+1029.770519028" observedRunningTime="2025-10-06 13:55:42.526624073 +0000 UTC m=+1031.023942620" watchObservedRunningTime="2025-10-06 13:55:42.533812817 +0000 UTC m=+1031.031131354" Oct 06 13:55:43 crc kubenswrapper[4757]: I1006 13:55:43.451197 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"39ef4372-3b20-44b5-b441-85f963e6a25a","Type":"ContainerStarted","Data":"8b51647808cd10dd44bf57888634b8904b7ecd8ed93207d6c169f832ac168892"} Oct 06 13:55:43 crc kubenswrapper[4757]: I1006 13:55:43.453184 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1489eff7-41ff-420a-bce0-14247f8554ee","Type":"ContainerStarted","Data":"9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7"} Oct 06 13:55:43 crc kubenswrapper[4757]: I1006 13:55:43.455027 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"47e3132d-b23b-47f2-b26e-5511df70deec","Type":"ContainerStarted","Data":"199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc"} Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.190703 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee7e3a27-b811-4424-9619-edb35acb3527" path="/var/lib/kubelet/pods/ee7e3a27-b811-4424-9619-edb35acb3527/volumes" Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.465659 4757 generic.go:334] "Generic (PLEG): container finished" podID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerID="fec952e5b30b12fb3f6ba508c1eda6d8defe1d59bc1dac7aa9b44c741c60a711" exitCode=0 Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.465738 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fqwwx" event={"ID":"3482c1c4-b15b-46cb-a897-3528fa22adda","Type":"ContainerDied","Data":"fec952e5b30b12fb3f6ba508c1eda6d8defe1d59bc1dac7aa9b44c741c60a711"} Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.467431 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61","Type":"ContainerStarted","Data":"42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4"} Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.468668 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7","Type":"ContainerStarted","Data":"60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379"} Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.470193 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7babda66-5edc-4775-b36d-e22b39689c1c","Type":"ContainerStarted","Data":"7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9"} Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.470344 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.471323 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"780fa947-0a68-4231-b5b3-e0cad80204d2","Type":"ContainerStarted","Data":"7f6ef039c9a56166775bfea66933d67e543e08d8ece459c480259808a1dad4e8"} Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.472468 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kn7bx" event={"ID":"0efda247-fa18-49db-a37d-1dd28d999ed7","Type":"ContainerStarted","Data":"861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3"} Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.472946 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-kn7bx" Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.474181 4757 generic.go:334] "Generic (PLEG): container finished" podID="26c6409f-16cc-4f63-9993-2939bcf833fc" containerID="beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5" exitCode=0 Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.474894 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" event={"ID":"26c6409f-16cc-4f63-9993-2939bcf833fc","Type":"ContainerDied","Data":"beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5"} Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.525953 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-kn7bx" podStartSLOduration=5.61525252 podStartE2EDuration="13.525936196s" podCreationTimestamp="2025-10-06 13:55:31 +0000 UTC" firstStartedPulling="2025-10-06 13:55:33.396477407 +0000 UTC m=+1021.893795944" lastFinishedPulling="2025-10-06 13:55:41.307161053 +0000 UTC m=+1029.804479620" observedRunningTime="2025-10-06 13:55:44.509842352 +0000 UTC m=+1033.007160889" watchObservedRunningTime="2025-10-06 13:55:44.525936196 +0000 UTC m=+1033.023254733" Oct 06 13:55:44 crc kubenswrapper[4757]: I1006 13:55:44.529503 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=9.047385046 podStartE2EDuration="18.529489376s" podCreationTimestamp="2025-10-06 13:55:26 +0000 UTC" firstStartedPulling="2025-10-06 13:55:33.240256653 +0000 UTC m=+1021.737575190" lastFinishedPulling="2025-10-06 13:55:42.722360983 +0000 UTC m=+1031.219679520" observedRunningTime="2025-10-06 13:55:44.522470037 +0000 UTC m=+1033.019788564" watchObservedRunningTime="2025-10-06 13:55:44.529489376 +0000 UTC m=+1033.026807913" Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.490202 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-l99t6" event={"ID":"f38ce569-0a5c-408d-9d44-bb953d38e24e","Type":"ContainerStarted","Data":"34a280e57324a0c49141921ad2801af530e40f7172ee7808ae2fc674330a1bf6"} Oct 06 13:55:46 crc 
kubenswrapper[4757]: I1006 13:55:46.495321 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"47e3132d-b23b-47f2-b26e-5511df70deec","Type":"ContainerStarted","Data":"0a01bd55e84626495bc5fb0d7d2194d09355579e800eb0a54e05bb99ed8a7ca6"} Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.498000 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fqwwx" event={"ID":"3482c1c4-b15b-46cb-a897-3528fa22adda","Type":"ContainerStarted","Data":"ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4"} Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.498067 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fqwwx" event={"ID":"3482c1c4-b15b-46cb-a897-3528fa22adda","Type":"ContainerStarted","Data":"21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86"} Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.498202 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.504268 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"780fa947-0a68-4231-b5b3-e0cad80204d2","Type":"ContainerStarted","Data":"a12b1909659d9dffd84421e8aa816dc9c5582d96e48bf6ee24c272907e439897"} Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.522499 4757 generic.go:334] "Generic (PLEG): container finished" podID="39ef4372-3b20-44b5-b441-85f963e6a25a" containerID="8b51647808cd10dd44bf57888634b8904b7ecd8ed93207d6c169f832ac168892" exitCode=0 Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.522658 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"39ef4372-3b20-44b5-b441-85f963e6a25a","Type":"ContainerDied","Data":"8b51647808cd10dd44bf57888634b8904b7ecd8ed93207d6c169f832ac168892"} Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.531715 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" event={"ID":"26c6409f-16cc-4f63-9993-2939bcf833fc","Type":"ContainerStarted","Data":"7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89"} Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.531752 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.555330 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-l99t6" podStartSLOduration=5.890561952 podStartE2EDuration="9.555307999s" podCreationTimestamp="2025-10-06 13:55:37 +0000 UTC" firstStartedPulling="2025-10-06 13:55:42.20960473 +0000 UTC m=+1030.706923267" lastFinishedPulling="2025-10-06 13:55:45.874350777 +0000 UTC m=+1034.371669314" observedRunningTime="2025-10-06 13:55:46.53902752 +0000 UTC m=+1035.036346067" watchObservedRunningTime="2025-10-06 13:55:46.555307999 +0000 UTC m=+1035.052626536" Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.590292 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=5.218583032 podStartE2EDuration="17.590273342s" podCreationTimestamp="2025-10-06 13:55:29 +0000 UTC" firstStartedPulling="2025-10-06 13:55:33.503639598 +0000 UTC m=+1022.000958135" lastFinishedPulling="2025-10-06 13:55:45.875329908 +0000 UTC m=+1034.372648445" observedRunningTime="2025-10-06 
13:55:46.589450637 +0000 UTC m=+1035.086769184" watchObservedRunningTime="2025-10-06 13:55:46.590273342 +0000 UTC m=+1035.087591879" Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.590328 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.725963 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" podStartSLOduration=8.725940294 podStartE2EDuration="8.725940294s" podCreationTimestamp="2025-10-06 13:55:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:55:46.683866519 +0000 UTC m=+1035.181185056" watchObservedRunningTime="2025-10-06 13:55:46.725940294 +0000 UTC m=+1035.223258841" Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.769421 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=10.197913295 podStartE2EDuration="14.769400963s" podCreationTimestamp="2025-10-06 13:55:32 +0000 UTC" firstStartedPulling="2025-10-06 13:55:41.301726223 +0000 UTC m=+1029.799044760" lastFinishedPulling="2025-10-06 13:55:45.873213891 +0000 UTC m=+1034.370532428" observedRunningTime="2025-10-06 13:55:46.739418795 +0000 UTC m=+1035.236737332" watchObservedRunningTime="2025-10-06 13:55:46.769400963 +0000 UTC m=+1035.266719490" Oct 06 13:55:46 crc kubenswrapper[4757]: I1006 13:55:46.772020 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-fqwwx" podStartSLOduration=9.170563614 podStartE2EDuration="15.772006914s" podCreationTimestamp="2025-10-06 13:55:31 +0000 UTC" firstStartedPulling="2025-10-06 13:55:34.700667465 +0000 UTC m=+1023.197986002" lastFinishedPulling="2025-10-06 13:55:41.302110765 +0000 UTC m=+1029.799429302" observedRunningTime="2025-10-06 13:55:46.767825274 +0000 UTC m=+1035.265143811" watchObservedRunningTime="2025-10-06 13:55:46.772006914 +0000 UTC m=+1035.269325451" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.037421 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65549948b5-v2fm9"] Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.067134 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bd9d686b9-ktnrb"] Oct 06 13:55:47 crc kubenswrapper[4757]: E1006 13:55:47.067741 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee7e3a27-b811-4424-9619-edb35acb3527" containerName="dnsmasq-dns" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.067867 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee7e3a27-b811-4424-9619-edb35acb3527" containerName="dnsmasq-dns" Oct 06 13:55:47 crc kubenswrapper[4757]: E1006 13:55:47.067933 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee7e3a27-b811-4424-9619-edb35acb3527" containerName="init" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.067990 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee7e3a27-b811-4424-9619-edb35acb3527" containerName="init" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.068235 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee7e3a27-b811-4424-9619-edb35acb3527" containerName="dnsmasq-dns" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.069271 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.071375 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.077863 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bd9d686b9-ktnrb"] Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.176514 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-sb\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.176597 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-config\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.176617 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzjkr\" (UniqueName: \"kubernetes.io/projected/2d710dd3-baf3-450d-b165-0ddc6f0844a8-kube-api-access-tzjkr\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.176722 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-dns-svc\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.176910 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-nb\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.278689 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-config\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.279015 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzjkr\" (UniqueName: \"kubernetes.io/projected/2d710dd3-baf3-450d-b165-0ddc6f0844a8-kube-api-access-tzjkr\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.279131 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-dns-svc\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" 
Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.279256 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-nb\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.279382 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-sb\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.280261 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-nb\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.279933 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-dns-svc\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.280249 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-sb\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.279639 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-config\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.302734 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzjkr\" (UniqueName: \"kubernetes.io/projected/2d710dd3-baf3-450d-b165-0ddc6f0844a8-kube-api-access-tzjkr\") pod \"dnsmasq-dns-bd9d686b9-ktnrb\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.387319 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.552651 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"39ef4372-3b20-44b5-b441-85f963e6a25a","Type":"ContainerStarted","Data":"c6ba27d47c99f13d015277533c3fba4500d832d05fa4632f9ba24c71c9d9ec8e"} Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.580659 4757 generic.go:334] "Generic (PLEG): container finished" podID="1489eff7-41ff-420a-bce0-14247f8554ee" containerID="9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7" exitCode=0 Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.581302 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1489eff7-41ff-420a-bce0-14247f8554ee","Type":"ContainerDied","Data":"9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7"} Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.603047 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=16.731231508 podStartE2EDuration="24.603024398s" podCreationTimestamp="2025-10-06 13:55:23 +0000 UTC" firstStartedPulling="2025-10-06 13:55:33.428448177 +0000 UTC m=+1021.925766704" lastFinishedPulling="2025-10-06 13:55:41.300241057 +0000 UTC m=+1029.797559594" observedRunningTime="2025-10-06 13:55:47.595436681 +0000 UTC m=+1036.092755228" watchObservedRunningTime="2025-10-06 13:55:47.603024398 +0000 UTC m=+1036.100342935" Oct 06 13:55:47 crc kubenswrapper[4757]: I1006 13:55:47.929354 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bd9d686b9-ktnrb"] Oct 06 13:55:48 crc kubenswrapper[4757]: I1006 13:55:48.595001 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1489eff7-41ff-420a-bce0-14247f8554ee","Type":"ContainerStarted","Data":"68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc"} Oct 06 13:55:48 crc kubenswrapper[4757]: I1006 13:55:48.598369 4757 generic.go:334] "Generic (PLEG): container finished" podID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" containerID="c745d29e281f96025bec4b46baa962dc0fb51150413f2df98fc6b2e401497c13" exitCode=0 Oct 06 13:55:48 crc kubenswrapper[4757]: I1006 13:55:48.598478 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" event={"ID":"2d710dd3-baf3-450d-b165-0ddc6f0844a8","Type":"ContainerDied","Data":"c745d29e281f96025bec4b46baa962dc0fb51150413f2df98fc6b2e401497c13"} Oct 06 13:55:48 crc kubenswrapper[4757]: I1006 13:55:48.598533 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" event={"ID":"2d710dd3-baf3-450d-b165-0ddc6f0844a8","Type":"ContainerStarted","Data":"05e110adb9ac1d0ac469df4f885302ba7ce585a7e0903a3cc02f964a54c94a45"} Oct 06 13:55:48 crc kubenswrapper[4757]: I1006 13:55:48.599003 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" podUID="26c6409f-16cc-4f63-9993-2939bcf833fc" containerName="dnsmasq-dns" containerID="cri-o://7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89" gracePeriod=10 Oct 06 13:55:48 crc kubenswrapper[4757]: I1006 13:55:48.652205 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=19.195133995 podStartE2EDuration="27.652176905s" podCreationTimestamp="2025-10-06 13:55:21 
+0000 UTC" firstStartedPulling="2025-10-06 13:55:33.233454819 +0000 UTC m=+1021.730773356" lastFinishedPulling="2025-10-06 13:55:41.690497729 +0000 UTC m=+1030.187816266" observedRunningTime="2025-10-06 13:55:48.643594691 +0000 UTC m=+1037.140913268" watchObservedRunningTime="2025-10-06 13:55:48.652176905 +0000 UTC m=+1037.149495472" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.033244 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.084553 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.085220 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.128953 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.212353 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-ovsdbserver-nb\") pod \"26c6409f-16cc-4f63-9993-2939bcf833fc\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.212806 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvqhs\" (UniqueName: \"kubernetes.io/projected/26c6409f-16cc-4f63-9993-2939bcf833fc-kube-api-access-nvqhs\") pod \"26c6409f-16cc-4f63-9993-2939bcf833fc\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.212981 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-config\") pod \"26c6409f-16cc-4f63-9993-2939bcf833fc\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.213087 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-dns-svc\") pod \"26c6409f-16cc-4f63-9993-2939bcf833fc\" (UID: \"26c6409f-16cc-4f63-9993-2939bcf833fc\") " Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.220653 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26c6409f-16cc-4f63-9993-2939bcf833fc-kube-api-access-nvqhs" (OuterVolumeSpecName: "kube-api-access-nvqhs") pod "26c6409f-16cc-4f63-9993-2939bcf833fc" (UID: "26c6409f-16cc-4f63-9993-2939bcf833fc"). InnerVolumeSpecName "kube-api-access-nvqhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.260067 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-config" (OuterVolumeSpecName: "config") pod "26c6409f-16cc-4f63-9993-2939bcf833fc" (UID: "26c6409f-16cc-4f63-9993-2939bcf833fc"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.263116 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "26c6409f-16cc-4f63-9993-2939bcf833fc" (UID: "26c6409f-16cc-4f63-9993-2939bcf833fc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.269626 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "26c6409f-16cc-4f63-9993-2939bcf833fc" (UID: "26c6409f-16cc-4f63-9993-2939bcf833fc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.300834 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.315578 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.315761 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.315855 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/26c6409f-16cc-4f63-9993-2939bcf833fc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.315923 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvqhs\" (UniqueName: \"kubernetes.io/projected/26c6409f-16cc-4f63-9993-2939bcf833fc-kube-api-access-nvqhs\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.362311 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.607083 4757 generic.go:334] "Generic (PLEG): container finished" podID="26c6409f-16cc-4f63-9993-2939bcf833fc" containerID="7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89" exitCode=0 Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.607161 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" event={"ID":"26c6409f-16cc-4f63-9993-2939bcf833fc","Type":"ContainerDied","Data":"7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89"} Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.607192 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" event={"ID":"26c6409f-16cc-4f63-9993-2939bcf833fc","Type":"ContainerDied","Data":"750d72ff108c63da74d0e8e344de9f988e476127c2945caeb59eda5c6a423166"} Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.607208 4757 scope.go:117] "RemoveContainer" containerID="7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.607316 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65549948b5-v2fm9" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.614866 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" event={"ID":"2d710dd3-baf3-450d-b165-0ddc6f0844a8","Type":"ContainerStarted","Data":"d8f64fea3522dcbce04186e095ee33f4b42a2dd2759db510a3170f447382aabe"} Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.615180 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.615339 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.628690 4757 scope.go:117] "RemoveContainer" containerID="beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.639767 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" podStartSLOduration=2.639743031 podStartE2EDuration="2.639743031s" podCreationTimestamp="2025-10-06 13:55:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:55:49.632678514 +0000 UTC m=+1038.129997101" watchObservedRunningTime="2025-10-06 13:55:49.639743031 +0000 UTC m=+1038.137061588" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.660318 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.664791 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65549948b5-v2fm9"] Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.671544 4757 scope.go:117] "RemoveContainer" containerID="7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.674088 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65549948b5-v2fm9"] Oct 06 13:55:49 crc kubenswrapper[4757]: E1006 13:55:49.690281 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89\": container with ID starting with 7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89 not found: ID does not exist" containerID="7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.690318 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89"} err="failed to get container status \"7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89\": rpc error: code = NotFound desc = could not find container \"7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89\": container with ID starting with 7e9ce2c5dc79a6ca9964da4a722192c483bce71b886e15de31b192bdb6ca7d89 not found: ID does not exist" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.690343 4757 scope.go:117] "RemoveContainer" containerID="beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5" Oct 06 13:55:49 crc kubenswrapper[4757]: E1006 13:55:49.690779 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5\": container with ID starting with beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5 not found: ID does not exist" containerID="beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.690794 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5"} err="failed to get container status \"beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5\": rpc error: code = NotFound desc = could not find container \"beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5\": container with ID starting with beeccac334e9962a51e2257ae2a4750e75693a94b3b9f55cf637fefd803131c5 not found: ID does not exist" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.692238 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 06 13:55:49 crc kubenswrapper[4757]: I1006 13:55:49.737287 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.060181 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 06 13:55:50 crc kubenswrapper[4757]: E1006 13:55:50.060921 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26c6409f-16cc-4f63-9993-2939bcf833fc" containerName="init" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.060979 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="26c6409f-16cc-4f63-9993-2939bcf833fc" containerName="init" Oct 06 13:55:50 crc kubenswrapper[4757]: E1006 13:55:50.061029 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26c6409f-16cc-4f63-9993-2939bcf833fc" containerName="dnsmasq-dns" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.061040 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="26c6409f-16cc-4f63-9993-2939bcf833fc" containerName="dnsmasq-dns" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.061308 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="26c6409f-16cc-4f63-9993-2939bcf833fc" containerName="dnsmasq-dns" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.062341 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.066629 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.066790 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.066813 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-n9zff" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.067668 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.100181 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.129226 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.129277 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.129328 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f845v\" (UniqueName: \"kubernetes.io/projected/cc01b313-87cb-44f6-9c85-84ae4931e1f6-kube-api-access-f845v\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.129470 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.129757 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-scripts\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.129864 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.129931 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-config\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: 
I1006 13:55:50.190615 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26c6409f-16cc-4f63-9993-2939bcf833fc" path="/var/lib/kubelet/pods/26c6409f-16cc-4f63-9993-2939bcf833fc/volumes" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.231925 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-scripts\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.231991 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.232033 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-config\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.232058 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.232083 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.232143 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f845v\" (UniqueName: \"kubernetes.io/projected/cc01b313-87cb-44f6-9c85-84ae4931e1f6-kube-api-access-f845v\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.232186 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.232845 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.233286 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-scripts\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.233292 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-config\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.236485 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.236844 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.239833 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.250512 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f845v\" (UniqueName: \"kubernetes.io/projected/cc01b313-87cb-44f6-9c85-84ae4931e1f6-kube-api-access-f845v\") pod \"ovn-northd-0\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.386309 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 06 13:55:50 crc kubenswrapper[4757]: I1006 13:55:50.891107 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 06 13:55:51 crc kubenswrapper[4757]: I1006 13:55:51.636362 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"cc01b313-87cb-44f6-9c85-84ae4931e1f6","Type":"ContainerStarted","Data":"d7338582b225420a21917e687be20d0d1a41b74e3eada643c81f2538c1c6531c"} Oct 06 13:55:52 crc kubenswrapper[4757]: I1006 13:55:52.652336 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"cc01b313-87cb-44f6-9c85-84ae4931e1f6","Type":"ContainerStarted","Data":"b1eadb61598f06991c54612b8621cac74bf4c117422efafbae7b8a42d1721473"} Oct 06 13:55:52 crc kubenswrapper[4757]: I1006 13:55:52.652978 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"cc01b313-87cb-44f6-9c85-84ae4931e1f6","Type":"ContainerStarted","Data":"1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b"} Oct 06 13:55:52 crc kubenswrapper[4757]: I1006 13:55:52.683062 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.690863813 podStartE2EDuration="2.683044802s" podCreationTimestamp="2025-10-06 13:55:50 +0000 UTC" firstStartedPulling="2025-10-06 13:55:50.902836378 +0000 UTC m=+1039.400154915" lastFinishedPulling="2025-10-06 13:55:51.895017367 +0000 UTC m=+1040.392335904" observedRunningTime="2025-10-06 13:55:52.681148744 +0000 UTC m=+1041.178467301" watchObservedRunningTime="2025-10-06 13:55:52.683044802 +0000 UTC m=+1041.180363359" Oct 06 13:55:53 crc kubenswrapper[4757]: I1006 13:55:53.332285 4757 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 06 13:55:53 crc kubenswrapper[4757]: I1006 13:55:53.332387 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 06 13:55:53 crc kubenswrapper[4757]: I1006 13:55:53.408617 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 06 13:55:53 crc kubenswrapper[4757]: I1006 13:55:53.664881 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 06 13:55:53 crc kubenswrapper[4757]: I1006 13:55:53.730329 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.389358 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.389912 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.427971 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-7hclp"] Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.429536 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7hclp" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.441621 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-7hclp"] Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.494286 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gh9d\" (UniqueName: \"kubernetes.io/projected/022bc107-84fd-4650-a5a3-533d42aef0f0-kube-api-access-9gh9d\") pod \"keystone-db-create-7hclp\" (UID: \"022bc107-84fd-4650-a5a3-533d42aef0f0\") " pod="openstack/keystone-db-create-7hclp" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.595480 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gh9d\" (UniqueName: \"kubernetes.io/projected/022bc107-84fd-4650-a5a3-533d42aef0f0-kube-api-access-9gh9d\") pod \"keystone-db-create-7hclp\" (UID: \"022bc107-84fd-4650-a5a3-533d42aef0f0\") " pod="openstack/keystone-db-create-7hclp" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.621171 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gh9d\" (UniqueName: \"kubernetes.io/projected/022bc107-84fd-4650-a5a3-533d42aef0f0-kube-api-access-9gh9d\") pod \"keystone-db-create-7hclp\" (UID: \"022bc107-84fd-4650-a5a3-533d42aef0f0\") " pod="openstack/keystone-db-create-7hclp" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.747890 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-qwgbm"] Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.749966 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qwgbm" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.753031 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-7hclp" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.755291 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-qwgbm"] Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.799845 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsxsh\" (UniqueName: \"kubernetes.io/projected/c6437e0a-2b4e-4f14-b7b8-573d464fad02-kube-api-access-nsxsh\") pod \"placement-db-create-qwgbm\" (UID: \"c6437e0a-2b4e-4f14-b7b8-573d464fad02\") " pod="openstack/placement-db-create-qwgbm" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.902080 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsxsh\" (UniqueName: \"kubernetes.io/projected/c6437e0a-2b4e-4f14-b7b8-573d464fad02-kube-api-access-nsxsh\") pod \"placement-db-create-qwgbm\" (UID: \"c6437e0a-2b4e-4f14-b7b8-573d464fad02\") " pod="openstack/placement-db-create-qwgbm" Oct 06 13:55:54 crc kubenswrapper[4757]: I1006 13:55:54.925824 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsxsh\" (UniqueName: \"kubernetes.io/projected/c6437e0a-2b4e-4f14-b7b8-573d464fad02-kube-api-access-nsxsh\") pod \"placement-db-create-qwgbm\" (UID: \"c6437e0a-2b4e-4f14-b7b8-573d464fad02\") " pod="openstack/placement-db-create-qwgbm" Oct 06 13:55:55 crc kubenswrapper[4757]: I1006 13:55:55.066910 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qwgbm" Oct 06 13:55:55 crc kubenswrapper[4757]: I1006 13:55:55.221137 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-7hclp"] Oct 06 13:55:55 crc kubenswrapper[4757]: W1006 13:55:55.239188 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod022bc107_84fd_4650_a5a3_533d42aef0f0.slice/crio-35f4ccbb1b2425ec0ec61efd26e293fee5942d9147f74101be6989308a885084 WatchSource:0}: Error finding container 35f4ccbb1b2425ec0ec61efd26e293fee5942d9147f74101be6989308a885084: Status 404 returned error can't find the container with id 35f4ccbb1b2425ec0ec61efd26e293fee5942d9147f74101be6989308a885084 Oct 06 13:55:55 crc kubenswrapper[4757]: I1006 13:55:55.445615 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 06 13:55:55 crc kubenswrapper[4757]: I1006 13:55:55.503629 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="39ef4372-3b20-44b5-b441-85f963e6a25a" containerName="galera" probeResult="failure" output=< Oct 06 13:55:55 crc kubenswrapper[4757]: wsrep_local_state_comment (Joined) differs from Synced Oct 06 13:55:55 crc kubenswrapper[4757]: > Oct 06 13:55:55 crc kubenswrapper[4757]: I1006 13:55:55.667405 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-qwgbm"] Oct 06 13:55:55 crc kubenswrapper[4757]: I1006 13:55:55.680504 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qwgbm" event={"ID":"c6437e0a-2b4e-4f14-b7b8-573d464fad02","Type":"ContainerStarted","Data":"386d5255ebc8d010f3fe5c2fea071b1580ee55db07c0851c96ea065fdee7bb79"} Oct 06 13:55:55 crc kubenswrapper[4757]: I1006 13:55:55.681308 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7hclp" 
event={"ID":"022bc107-84fd-4650-a5a3-533d42aef0f0","Type":"ContainerStarted","Data":"35f4ccbb1b2425ec0ec61efd26e293fee5942d9147f74101be6989308a885084"} Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.432739 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bd9d686b9-ktnrb"] Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.441776 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" podUID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" containerName="dnsmasq-dns" containerID="cri-o://d8f64fea3522dcbce04186e095ee33f4b42a2dd2759db510a3170f447382aabe" gracePeriod=10 Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.447303 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.467998 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-647b75b6c-c9gqf"] Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.476771 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.494205 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647b75b6c-c9gqf"] Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.499296 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.651954 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-nb\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.652048 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-dns-svc\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.652130 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgnnj\" (UniqueName: \"kubernetes.io/projected/7cd83436-1c2f-4506-8505-c21002020f08-kube-api-access-rgnnj\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.652158 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-sb\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.652181 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-config\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 
13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.753853 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-nb\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.753906 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-dns-svc\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.754133 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgnnj\" (UniqueName: \"kubernetes.io/projected/7cd83436-1c2f-4506-8505-c21002020f08-kube-api-access-rgnnj\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.754518 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-sb\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.754559 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-config\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.754894 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-dns-svc\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.755117 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-nb\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.755489 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-config\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.755848 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-sb\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.780496 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rgnnj\" (UniqueName: \"kubernetes.io/projected/7cd83436-1c2f-4506-8505-c21002020f08-kube-api-access-rgnnj\") pod \"dnsmasq-dns-647b75b6c-c9gqf\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:56 crc kubenswrapper[4757]: I1006 13:55:56.804257 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.241639 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647b75b6c-c9gqf"] Oct 06 13:55:57 crc kubenswrapper[4757]: W1006 13:55:57.245242 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7cd83436_1c2f_4506_8505_c21002020f08.slice/crio-37ddbb752212d758c67e7d208d62c88409189ccd2e2c28d5c570fcbdd7f019bd WatchSource:0}: Error finding container 37ddbb752212d758c67e7d208d62c88409189ccd2e2c28d5c570fcbdd7f019bd: Status 404 returned error can't find the container with id 37ddbb752212d758c67e7d208d62c88409189ccd2e2c28d5c570fcbdd7f019bd Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.389415 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" podUID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.569430 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.576784 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.580162 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.580446 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.581044 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-fbhlz" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.581159 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.615082 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.699446 4757 generic.go:334] "Generic (PLEG): container finished" podID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" containerID="d8f64fea3522dcbce04186e095ee33f4b42a2dd2759db510a3170f447382aabe" exitCode=0 Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.699482 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" event={"ID":"2d710dd3-baf3-450d-b165-0ddc6f0844a8","Type":"ContainerDied","Data":"d8f64fea3522dcbce04186e095ee33f4b42a2dd2759db510a3170f447382aabe"} Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.700726 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" event={"ID":"7cd83436-1c2f-4506-8505-c21002020f08","Type":"ContainerStarted","Data":"37ddbb752212d758c67e7d208d62c88409189ccd2e2c28d5c570fcbdd7f019bd"} Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.770372 4757 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-cache\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.770416 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-lock\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.770546 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.770611 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.770665 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g72g\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-kube-api-access-6g72g\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.872498 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-cache\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.872556 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-lock\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.872614 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.872649 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.872695 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g72g\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-kube-api-access-6g72g\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 
13:55:57 crc kubenswrapper[4757]: E1006 13:55:57.873121 4757 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 06 13:55:57 crc kubenswrapper[4757]: E1006 13:55:57.873138 4757 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 06 13:55:57 crc kubenswrapper[4757]: E1006 13:55:57.873175 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift podName:cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e nodeName:}" failed. No retries permitted until 2025-10-06 13:55:58.37315954 +0000 UTC m=+1046.870478077 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift") pod "swift-storage-0" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e") : configmap "swift-ring-files" not found Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.873188 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-lock\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.873199 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-cache\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.873451 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.897213 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:57 crc kubenswrapper[4757]: I1006 13:55:57.909342 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g72g\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-kube-api-access-6g72g\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:58 crc kubenswrapper[4757]: I1006 13:55:58.380617 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:58 crc kubenswrapper[4757]: E1006 13:55:58.380877 4757 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 06 13:55:58 crc kubenswrapper[4757]: E1006 13:55:58.381142 4757 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 06 13:55:58 crc kubenswrapper[4757]: E1006 
13:55:58.381211 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift podName:cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e nodeName:}" failed. No retries permitted until 2025-10-06 13:55:59.381187468 +0000 UTC m=+1047.878506015 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift") pod "swift-storage-0" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e") : configmap "swift-ring-files" not found Oct 06 13:55:58 crc kubenswrapper[4757]: I1006 13:55:58.709671 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qwgbm" event={"ID":"c6437e0a-2b4e-4f14-b7b8-573d464fad02","Type":"ContainerStarted","Data":"c03ad77e6eab96b0f4db0d65b0073b2f4981616ff4113026446bdb1c1cfbe544"} Oct 06 13:55:58 crc kubenswrapper[4757]: I1006 13:55:58.711404 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7hclp" event={"ID":"022bc107-84fd-4650-a5a3-533d42aef0f0","Type":"ContainerStarted","Data":"ea10fa561dc98e7d6bbfee54e36f1a3d032db95d0fe11323ec54fed9a2ecc0c3"} Oct 06 13:55:58 crc kubenswrapper[4757]: I1006 13:55:58.732831 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-7hclp" podStartSLOduration=4.73281235 podStartE2EDuration="4.73281235s" podCreationTimestamp="2025-10-06 13:55:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:55:58.726127114 +0000 UTC m=+1047.223445651" watchObservedRunningTime="2025-10-06 13:55:58.73281235 +0000 UTC m=+1047.230130877" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.382062 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.404155 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:55:59 crc kubenswrapper[4757]: E1006 13:55:59.404398 4757 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 06 13:55:59 crc kubenswrapper[4757]: E1006 13:55:59.404430 4757 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 06 13:55:59 crc kubenswrapper[4757]: E1006 13:55:59.404497 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift podName:cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e nodeName:}" failed. No retries permitted until 2025-10-06 13:56:01.404470463 +0000 UTC m=+1049.901789000 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift") pod "swift-storage-0" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e") : configmap "swift-ring-files" not found Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.505410 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-sb\") pod \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.505471 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-config\") pod \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.505506 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-dns-svc\") pod \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.505644 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzjkr\" (UniqueName: \"kubernetes.io/projected/2d710dd3-baf3-450d-b165-0ddc6f0844a8-kube-api-access-tzjkr\") pod \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.505706 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-nb\") pod \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\" (UID: \"2d710dd3-baf3-450d-b165-0ddc6f0844a8\") " Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.515329 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d710dd3-baf3-450d-b165-0ddc6f0844a8-kube-api-access-tzjkr" (OuterVolumeSpecName: "kube-api-access-tzjkr") pod "2d710dd3-baf3-450d-b165-0ddc6f0844a8" (UID: "2d710dd3-baf3-450d-b165-0ddc6f0844a8"). InnerVolumeSpecName "kube-api-access-tzjkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.547153 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2d710dd3-baf3-450d-b165-0ddc6f0844a8" (UID: "2d710dd3-baf3-450d-b165-0ddc6f0844a8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.547153 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2d710dd3-baf3-450d-b165-0ddc6f0844a8" (UID: "2d710dd3-baf3-450d-b165-0ddc6f0844a8"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.551366 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2d710dd3-baf3-450d-b165-0ddc6f0844a8" (UID: "2d710dd3-baf3-450d-b165-0ddc6f0844a8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.563365 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-config" (OuterVolumeSpecName: "config") pod "2d710dd3-baf3-450d-b165-0ddc6f0844a8" (UID: "2d710dd3-baf3-450d-b165-0ddc6f0844a8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.607637 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.607668 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.607676 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.607686 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzjkr\" (UniqueName: \"kubernetes.io/projected/2d710dd3-baf3-450d-b165-0ddc6f0844a8-kube-api-access-tzjkr\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.607696 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d710dd3-baf3-450d-b165-0ddc6f0844a8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.719247 4757 generic.go:334] "Generic (PLEG): container finished" podID="022bc107-84fd-4650-a5a3-533d42aef0f0" containerID="ea10fa561dc98e7d6bbfee54e36f1a3d032db95d0fe11323ec54fed9a2ecc0c3" exitCode=0 Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.719383 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7hclp" event={"ID":"022bc107-84fd-4650-a5a3-533d42aef0f0","Type":"ContainerDied","Data":"ea10fa561dc98e7d6bbfee54e36f1a3d032db95d0fe11323ec54fed9a2ecc0c3"} Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.721399 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" event={"ID":"2d710dd3-baf3-450d-b165-0ddc6f0844a8","Type":"ContainerDied","Data":"05e110adb9ac1d0ac469df4f885302ba7ce585a7e0903a3cc02f964a54c94a45"} Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.721429 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bd9d686b9-ktnrb" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.721448 4757 scope.go:117] "RemoveContainer" containerID="d8f64fea3522dcbce04186e095ee33f4b42a2dd2759db510a3170f447382aabe" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.722753 4757 generic.go:334] "Generic (PLEG): container finished" podID="7cd83436-1c2f-4506-8505-c21002020f08" containerID="edc7c0c813673d3c082161a592fea917778977c75438725a6f1503c00cdec014" exitCode=0 Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.722797 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" event={"ID":"7cd83436-1c2f-4506-8505-c21002020f08","Type":"ContainerDied","Data":"edc7c0c813673d3c082161a592fea917778977c75438725a6f1503c00cdec014"} Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.726148 4757 generic.go:334] "Generic (PLEG): container finished" podID="c6437e0a-2b4e-4f14-b7b8-573d464fad02" containerID="c03ad77e6eab96b0f4db0d65b0073b2f4981616ff4113026446bdb1c1cfbe544" exitCode=0 Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.726338 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qwgbm" event={"ID":"c6437e0a-2b4e-4f14-b7b8-573d464fad02","Type":"ContainerDied","Data":"c03ad77e6eab96b0f4db0d65b0073b2f4981616ff4113026446bdb1c1cfbe544"} Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.770953 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bd9d686b9-ktnrb"] Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.772060 4757 scope.go:117] "RemoveContainer" containerID="c745d29e281f96025bec4b46baa962dc0fb51150413f2df98fc6b2e401497c13" Oct 06 13:55:59 crc kubenswrapper[4757]: I1006 13:55:59.782281 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bd9d686b9-ktnrb"] Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.021684 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-k5f5t"] Oct 06 13:56:00 crc kubenswrapper[4757]: E1006 13:56:00.022058 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" containerName="dnsmasq-dns" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.022079 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" containerName="dnsmasq-dns" Oct 06 13:56:00 crc kubenswrapper[4757]: E1006 13:56:00.022128 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" containerName="init" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.022140 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" containerName="init" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.022362 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" containerName="dnsmasq-dns" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.022961 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-k5f5t" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.029144 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-k5f5t"] Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.192325 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d710dd3-baf3-450d-b165-0ddc6f0844a8" path="/var/lib/kubelet/pods/2d710dd3-baf3-450d-b165-0ddc6f0844a8/volumes" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.217844 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cgwg\" (UniqueName: \"kubernetes.io/projected/a5cfcfe4-1c22-4459-a88c-026da067c650-kube-api-access-8cgwg\") pod \"glance-db-create-k5f5t\" (UID: \"a5cfcfe4-1c22-4459-a88c-026da067c650\") " pod="openstack/glance-db-create-k5f5t" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.319865 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cgwg\" (UniqueName: \"kubernetes.io/projected/a5cfcfe4-1c22-4459-a88c-026da067c650-kube-api-access-8cgwg\") pod \"glance-db-create-k5f5t\" (UID: \"a5cfcfe4-1c22-4459-a88c-026da067c650\") " pod="openstack/glance-db-create-k5f5t" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.343932 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cgwg\" (UniqueName: \"kubernetes.io/projected/a5cfcfe4-1c22-4459-a88c-026da067c650-kube-api-access-8cgwg\") pod \"glance-db-create-k5f5t\" (UID: \"a5cfcfe4-1c22-4459-a88c-026da067c650\") " pod="openstack/glance-db-create-k5f5t" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.644016 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k5f5t" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.757479 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" event={"ID":"7cd83436-1c2f-4506-8505-c21002020f08","Type":"ContainerStarted","Data":"376c98aafdd1481ec49271aaf4e5afaed826cd382cffa7ebf5b346dddaef55eb"} Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.758144 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:56:00 crc kubenswrapper[4757]: I1006 13:56:00.775972 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" podStartSLOduration=4.775955636 podStartE2EDuration="4.775955636s" podCreationTimestamp="2025-10-06 13:55:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:00.775698698 +0000 UTC m=+1049.273017245" watchObservedRunningTime="2025-10-06 13:56:00.775955636 +0000 UTC m=+1049.273274173" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.091756 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-k5f5t"] Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.136224 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qwgbm" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.145083 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-7hclp" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.234476 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsxsh\" (UniqueName: \"kubernetes.io/projected/c6437e0a-2b4e-4f14-b7b8-573d464fad02-kube-api-access-nsxsh\") pod \"c6437e0a-2b4e-4f14-b7b8-573d464fad02\" (UID: \"c6437e0a-2b4e-4f14-b7b8-573d464fad02\") " Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.240892 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6437e0a-2b4e-4f14-b7b8-573d464fad02-kube-api-access-nsxsh" (OuterVolumeSpecName: "kube-api-access-nsxsh") pod "c6437e0a-2b4e-4f14-b7b8-573d464fad02" (UID: "c6437e0a-2b4e-4f14-b7b8-573d464fad02"). InnerVolumeSpecName "kube-api-access-nsxsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.335822 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gh9d\" (UniqueName: \"kubernetes.io/projected/022bc107-84fd-4650-a5a3-533d42aef0f0-kube-api-access-9gh9d\") pod \"022bc107-84fd-4650-a5a3-533d42aef0f0\" (UID: \"022bc107-84fd-4650-a5a3-533d42aef0f0\") " Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.336263 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsxsh\" (UniqueName: \"kubernetes.io/projected/c6437e0a-2b4e-4f14-b7b8-573d464fad02-kube-api-access-nsxsh\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.341365 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/022bc107-84fd-4650-a5a3-533d42aef0f0-kube-api-access-9gh9d" (OuterVolumeSpecName: "kube-api-access-9gh9d") pod "022bc107-84fd-4650-a5a3-533d42aef0f0" (UID: "022bc107-84fd-4650-a5a3-533d42aef0f0"). InnerVolumeSpecName "kube-api-access-9gh9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.437812 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.437899 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gh9d\" (UniqueName: \"kubernetes.io/projected/022bc107-84fd-4650-a5a3-533d42aef0f0-kube-api-access-9gh9d\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:01 crc kubenswrapper[4757]: E1006 13:56:01.437976 4757 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 06 13:56:01 crc kubenswrapper[4757]: E1006 13:56:01.437993 4757 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 06 13:56:01 crc kubenswrapper[4757]: E1006 13:56:01.438038 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift podName:cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e nodeName:}" failed. No retries permitted until 2025-10-06 13:56:05.438025195 +0000 UTC m=+1053.935343732 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift") pod "swift-storage-0" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e") : configmap "swift-ring-files" not found Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.531628 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-vpghx"] Oct 06 13:56:01 crc kubenswrapper[4757]: E1006 13:56:01.532068 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="022bc107-84fd-4650-a5a3-533d42aef0f0" containerName="mariadb-database-create" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.532115 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="022bc107-84fd-4650-a5a3-533d42aef0f0" containerName="mariadb-database-create" Oct 06 13:56:01 crc kubenswrapper[4757]: E1006 13:56:01.532140 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6437e0a-2b4e-4f14-b7b8-573d464fad02" containerName="mariadb-database-create" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.532148 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6437e0a-2b4e-4f14-b7b8-573d464fad02" containerName="mariadb-database-create" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.532369 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6437e0a-2b4e-4f14-b7b8-573d464fad02" containerName="mariadb-database-create" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.532395 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="022bc107-84fd-4650-a5a3-533d42aef0f0" containerName="mariadb-database-create" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.533128 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.534798 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.534945 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.535386 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.546780 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-vpghx"] Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.640422 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/68bdda02-206a-460a-b6a9-7ab492a1f518-etc-swift\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.640486 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-scripts\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.640517 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-combined-ca-bundle\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.640564 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-swiftconf\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.640620 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-ring-data-devices\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.640652 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-dispersionconf\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.640730 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64ltx\" (UniqueName: \"kubernetes.io/projected/68bdda02-206a-460a-b6a9-7ab492a1f518-kube-api-access-64ltx\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.742207 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-ring-data-devices\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.742286 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-dispersionconf\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.742374 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64ltx\" (UniqueName: \"kubernetes.io/projected/68bdda02-206a-460a-b6a9-7ab492a1f518-kube-api-access-64ltx\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.742432 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/68bdda02-206a-460a-b6a9-7ab492a1f518-etc-swift\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.742454 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-scripts\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.742476 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-combined-ca-bundle\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.742514 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-swiftconf\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.744179 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-ring-data-devices\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.744456 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-scripts\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.744522 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/68bdda02-206a-460a-b6a9-7ab492a1f518-etc-swift\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.747176 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-swiftconf\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.747473 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-combined-ca-bundle\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.748017 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-dispersionconf\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.764019 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64ltx\" (UniqueName: \"kubernetes.io/projected/68bdda02-206a-460a-b6a9-7ab492a1f518-kube-api-access-64ltx\") pod \"swift-ring-rebalance-vpghx\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " 
pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.767064 4757 generic.go:334] "Generic (PLEG): container finished" podID="a5cfcfe4-1c22-4459-a88c-026da067c650" containerID="a01221e8d3ac0dc2f1c87b5858c652449959b77ec1ba6714a10026c0882576cc" exitCode=0 Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.767146 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k5f5t" event={"ID":"a5cfcfe4-1c22-4459-a88c-026da067c650","Type":"ContainerDied","Data":"a01221e8d3ac0dc2f1c87b5858c652449959b77ec1ba6714a10026c0882576cc"} Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.767183 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k5f5t" event={"ID":"a5cfcfe4-1c22-4459-a88c-026da067c650","Type":"ContainerStarted","Data":"a17df85ef5ef38ecb2bc9483c517bd7f3d942f63f88079b2cc16dff65ea28e94"} Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.768606 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qwgbm" event={"ID":"c6437e0a-2b4e-4f14-b7b8-573d464fad02","Type":"ContainerDied","Data":"386d5255ebc8d010f3fe5c2fea071b1580ee55db07c0851c96ea065fdee7bb79"} Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.768636 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="386d5255ebc8d010f3fe5c2fea071b1580ee55db07c0851c96ea065fdee7bb79" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.768678 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qwgbm" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.772369 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-7hclp" event={"ID":"022bc107-84fd-4650-a5a3-533d42aef0f0","Type":"ContainerDied","Data":"35f4ccbb1b2425ec0ec61efd26e293fee5942d9147f74101be6989308a885084"} Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.772418 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35f4ccbb1b2425ec0ec61efd26e293fee5942d9147f74101be6989308a885084" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.772480 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-7hclp" Oct 06 13:56:01 crc kubenswrapper[4757]: I1006 13:56:01.870417 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:02 crc kubenswrapper[4757]: I1006 13:56:02.435943 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-vpghx"] Oct 06 13:56:02 crc kubenswrapper[4757]: W1006 13:56:02.440702 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68bdda02_206a_460a_b6a9_7ab492a1f518.slice/crio-7b7adfbac8ded7cc1a93ea8e6b3d34f55b9321152e1ea886f60a246b4dbe6991 WatchSource:0}: Error finding container 7b7adfbac8ded7cc1a93ea8e6b3d34f55b9321152e1ea886f60a246b4dbe6991: Status 404 returned error can't find the container with id 7b7adfbac8ded7cc1a93ea8e6b3d34f55b9321152e1ea886f60a246b4dbe6991 Oct 06 13:56:02 crc kubenswrapper[4757]: I1006 13:56:02.779698 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-vpghx" event={"ID":"68bdda02-206a-460a-b6a9-7ab492a1f518","Type":"ContainerStarted","Data":"7b7adfbac8ded7cc1a93ea8e6b3d34f55b9321152e1ea886f60a246b4dbe6991"} Oct 06 13:56:03 crc kubenswrapper[4757]: I1006 13:56:03.192845 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-k5f5t" Oct 06 13:56:03 crc kubenswrapper[4757]: I1006 13:56:03.376065 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cgwg\" (UniqueName: \"kubernetes.io/projected/a5cfcfe4-1c22-4459-a88c-026da067c650-kube-api-access-8cgwg\") pod \"a5cfcfe4-1c22-4459-a88c-026da067c650\" (UID: \"a5cfcfe4-1c22-4459-a88c-026da067c650\") " Oct 06 13:56:03 crc kubenswrapper[4757]: I1006 13:56:03.382847 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5cfcfe4-1c22-4459-a88c-026da067c650-kube-api-access-8cgwg" (OuterVolumeSpecName: "kube-api-access-8cgwg") pod "a5cfcfe4-1c22-4459-a88c-026da067c650" (UID: "a5cfcfe4-1c22-4459-a88c-026da067c650"). InnerVolumeSpecName "kube-api-access-8cgwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:03 crc kubenswrapper[4757]: I1006 13:56:03.478577 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cgwg\" (UniqueName: \"kubernetes.io/projected/a5cfcfe4-1c22-4459-a88c-026da067c650-kube-api-access-8cgwg\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:03 crc kubenswrapper[4757]: I1006 13:56:03.788777 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-k5f5t" event={"ID":"a5cfcfe4-1c22-4459-a88c-026da067c650","Type":"ContainerDied","Data":"a17df85ef5ef38ecb2bc9483c517bd7f3d942f63f88079b2cc16dff65ea28e94"} Oct 06 13:56:03 crc kubenswrapper[4757]: I1006 13:56:03.788830 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a17df85ef5ef38ecb2bc9483c517bd7f3d942f63f88079b2cc16dff65ea28e94" Oct 06 13:56:03 crc kubenswrapper[4757]: I1006 13:56:03.788858 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-k5f5t" Oct 06 13:56:04 crc kubenswrapper[4757]: I1006 13:56:04.431754 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 06 13:56:05 crc kubenswrapper[4757]: I1006 13:56:05.460457 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 06 13:56:05 crc kubenswrapper[4757]: I1006 13:56:05.510939 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:56:05 crc kubenswrapper[4757]: E1006 13:56:05.511120 4757 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 06 13:56:05 crc kubenswrapper[4757]: E1006 13:56:05.511151 4757 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 06 13:56:05 crc kubenswrapper[4757]: E1006 13:56:05.511216 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift podName:cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e nodeName:}" failed. No retries permitted until 2025-10-06 13:56:13.511194574 +0000 UTC m=+1062.008513131 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift") pod "swift-storage-0" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e") : configmap "swift-ring-files" not found Oct 06 13:56:05 crc kubenswrapper[4757]: I1006 13:56:05.815942 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-vpghx" event={"ID":"68bdda02-206a-460a-b6a9-7ab492a1f518","Type":"ContainerStarted","Data":"47b53b2403b9eba16bf691abd65e159f7ebc8cbf8525c716c07c59cbfd29b411"} Oct 06 13:56:05 crc kubenswrapper[4757]: I1006 13:56:05.832864 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-vpghx" podStartSLOduration=1.7721141550000001 podStartE2EDuration="4.832841643s" podCreationTimestamp="2025-10-06 13:56:01 +0000 UTC" firstStartedPulling="2025-10-06 13:56:02.443530312 +0000 UTC m=+1050.940848859" lastFinishedPulling="2025-10-06 13:56:05.50425779 +0000 UTC m=+1054.001576347" observedRunningTime="2025-10-06 13:56:05.830547903 +0000 UTC m=+1054.327866440" watchObservedRunningTime="2025-10-06 13:56:05.832841643 +0000 UTC m=+1054.330160220" Oct 06 13:56:06 crc kubenswrapper[4757]: I1006 13:56:06.806428 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:56:06 crc kubenswrapper[4757]: I1006 13:56:06.895394 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74b957b89f-7vw7l"] Oct 06 13:56:06 crc kubenswrapper[4757]: I1006 13:56:06.895681 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" podUID="1df6a3ef-a968-4f91-a58d-4fa75a44130a" containerName="dnsmasq-dns" containerID="cri-o://b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca" gracePeriod=10 Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.426748 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.546411 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjx2z\" (UniqueName: \"kubernetes.io/projected/1df6a3ef-a968-4f91-a58d-4fa75a44130a-kube-api-access-sjx2z\") pod \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.546507 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-dns-svc\") pod \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.546671 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-config\") pod \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\" (UID: \"1df6a3ef-a968-4f91-a58d-4fa75a44130a\") " Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.554572 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1df6a3ef-a968-4f91-a58d-4fa75a44130a-kube-api-access-sjx2z" (OuterVolumeSpecName: "kube-api-access-sjx2z") pod "1df6a3ef-a968-4f91-a58d-4fa75a44130a" (UID: "1df6a3ef-a968-4f91-a58d-4fa75a44130a"). InnerVolumeSpecName "kube-api-access-sjx2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.607723 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1df6a3ef-a968-4f91-a58d-4fa75a44130a" (UID: "1df6a3ef-a968-4f91-a58d-4fa75a44130a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.628831 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-config" (OuterVolumeSpecName: "config") pod "1df6a3ef-a968-4f91-a58d-4fa75a44130a" (UID: "1df6a3ef-a968-4f91-a58d-4fa75a44130a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.648152 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.648187 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1df6a3ef-a968-4f91-a58d-4fa75a44130a-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.648198 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjx2z\" (UniqueName: \"kubernetes.io/projected/1df6a3ef-a968-4f91-a58d-4fa75a44130a-kube-api-access-sjx2z\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.848126 4757 generic.go:334] "Generic (PLEG): container finished" podID="1df6a3ef-a968-4f91-a58d-4fa75a44130a" containerID="b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca" exitCode=0 Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.848194 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" event={"ID":"1df6a3ef-a968-4f91-a58d-4fa75a44130a","Type":"ContainerDied","Data":"b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca"} Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.848252 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.848387 4757 scope.go:117] "RemoveContainer" containerID="b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.848263 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74b957b89f-7vw7l" event={"ID":"1df6a3ef-a968-4f91-a58d-4fa75a44130a","Type":"ContainerDied","Data":"09d6532564c2ff39634ab2937bbc90583b737a3d4ee0f0e10b2fe417184b6a7a"} Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.873569 4757 scope.go:117] "RemoveContainer" containerID="d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.897796 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74b957b89f-7vw7l"] Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.905363 4757 scope.go:117] "RemoveContainer" containerID="b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca" Oct 06 13:56:07 crc kubenswrapper[4757]: E1006 13:56:07.905745 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca\": container with ID starting with b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca not found: ID does not exist" containerID="b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.905789 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca"} err="failed to get container status \"b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca\": rpc error: code = NotFound desc = could not find container \"b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca\": 
container with ID starting with b4caf5474c162c7d3ccc48ab1945ea546acd05b6cc83be1352912310e8b93cca not found: ID does not exist" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.905821 4757 scope.go:117] "RemoveContainer" containerID="d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.906253 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74b957b89f-7vw7l"] Oct 06 13:56:07 crc kubenswrapper[4757]: E1006 13:56:07.906741 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820\": container with ID starting with d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820 not found: ID does not exist" containerID="d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820" Oct 06 13:56:07 crc kubenswrapper[4757]: I1006 13:56:07.906798 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820"} err="failed to get container status \"d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820\": rpc error: code = NotFound desc = could not find container \"d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820\": container with ID starting with d03f3c1a72cbb023719b4eec0bdb3c871ea5430f1e9820868a726fa30b749820 not found: ID does not exist" Oct 06 13:56:08 crc kubenswrapper[4757]: I1006 13:56:08.206632 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1df6a3ef-a968-4f91-a58d-4fa75a44130a" path="/var/lib/kubelet/pods/1df6a3ef-a968-4f91-a58d-4fa75a44130a/volumes" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.096165 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-a940-account-create-wq96v"] Oct 06 13:56:10 crc kubenswrapper[4757]: E1006 13:56:10.096889 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1df6a3ef-a968-4f91-a58d-4fa75a44130a" containerName="dnsmasq-dns" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.096902 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1df6a3ef-a968-4f91-a58d-4fa75a44130a" containerName="dnsmasq-dns" Oct 06 13:56:10 crc kubenswrapper[4757]: E1006 13:56:10.096916 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5cfcfe4-1c22-4459-a88c-026da067c650" containerName="mariadb-database-create" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.096922 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5cfcfe4-1c22-4459-a88c-026da067c650" containerName="mariadb-database-create" Oct 06 13:56:10 crc kubenswrapper[4757]: E1006 13:56:10.096937 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1df6a3ef-a968-4f91-a58d-4fa75a44130a" containerName="init" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.096942 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1df6a3ef-a968-4f91-a58d-4fa75a44130a" containerName="init" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.097515 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5cfcfe4-1c22-4459-a88c-026da067c650" containerName="mariadb-database-create" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.097548 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="1df6a3ef-a968-4f91-a58d-4fa75a44130a" containerName="dnsmasq-dns" Oct 06 13:56:10 crc 
kubenswrapper[4757]: I1006 13:56:10.098113 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-a940-account-create-wq96v" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.100942 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.115631 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jk4r\" (UniqueName: \"kubernetes.io/projected/c2681f2d-ee52-4508-81f8-4e20289fb03d-kube-api-access-8jk4r\") pod \"glance-a940-account-create-wq96v\" (UID: \"c2681f2d-ee52-4508-81f8-4e20289fb03d\") " pod="openstack/glance-a940-account-create-wq96v" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.118725 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a940-account-create-wq96v"] Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.218170 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jk4r\" (UniqueName: \"kubernetes.io/projected/c2681f2d-ee52-4508-81f8-4e20289fb03d-kube-api-access-8jk4r\") pod \"glance-a940-account-create-wq96v\" (UID: \"c2681f2d-ee52-4508-81f8-4e20289fb03d\") " pod="openstack/glance-a940-account-create-wq96v" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.240737 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jk4r\" (UniqueName: \"kubernetes.io/projected/c2681f2d-ee52-4508-81f8-4e20289fb03d-kube-api-access-8jk4r\") pod \"glance-a940-account-create-wq96v\" (UID: \"c2681f2d-ee52-4508-81f8-4e20289fb03d\") " pod="openstack/glance-a940-account-create-wq96v" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.427504 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a940-account-create-wq96v" Oct 06 13:56:10 crc kubenswrapper[4757]: I1006 13:56:10.879266 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a940-account-create-wq96v"] Oct 06 13:56:10 crc kubenswrapper[4757]: W1006 13:56:10.887074 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2681f2d_ee52_4508_81f8_4e20289fb03d.slice/crio-a3661882f2c65c00811f6a1a52efbc43f9c24c00e907d67b4b18b64f2dc6194e WatchSource:0}: Error finding container a3661882f2c65c00811f6a1a52efbc43f9c24c00e907d67b4b18b64f2dc6194e: Status 404 returned error can't find the container with id a3661882f2c65c00811f6a1a52efbc43f9c24c00e907d67b4b18b64f2dc6194e Oct 06 13:56:11 crc kubenswrapper[4757]: I1006 13:56:11.893160 4757 generic.go:334] "Generic (PLEG): container finished" podID="c2681f2d-ee52-4508-81f8-4e20289fb03d" containerID="7ac68cc64c4f397d9ab7370c8469fd24d779a4e388ecba14d87d13a324cae7f5" exitCode=0 Oct 06 13:56:11 crc kubenswrapper[4757]: I1006 13:56:11.893217 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a940-account-create-wq96v" event={"ID":"c2681f2d-ee52-4508-81f8-4e20289fb03d","Type":"ContainerDied","Data":"7ac68cc64c4f397d9ab7370c8469fd24d779a4e388ecba14d87d13a324cae7f5"} Oct 06 13:56:11 crc kubenswrapper[4757]: I1006 13:56:11.893438 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a940-account-create-wq96v" event={"ID":"c2681f2d-ee52-4508-81f8-4e20289fb03d","Type":"ContainerStarted","Data":"a3661882f2c65c00811f6a1a52efbc43f9c24c00e907d67b4b18b64f2dc6194e"} Oct 06 13:56:12 crc kubenswrapper[4757]: I1006 13:56:12.905918 4757 generic.go:334] "Generic (PLEG): container finished" podID="68bdda02-206a-460a-b6a9-7ab492a1f518" containerID="47b53b2403b9eba16bf691abd65e159f7ebc8cbf8525c716c07c59cbfd29b411" exitCode=0 Oct 06 13:56:12 crc kubenswrapper[4757]: I1006 13:56:12.906040 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-vpghx" event={"ID":"68bdda02-206a-460a-b6a9-7ab492a1f518","Type":"ContainerDied","Data":"47b53b2403b9eba16bf691abd65e159f7ebc8cbf8525c716c07c59cbfd29b411"} Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.213895 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-a940-account-create-wq96v" Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.375321 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jk4r\" (UniqueName: \"kubernetes.io/projected/c2681f2d-ee52-4508-81f8-4e20289fb03d-kube-api-access-8jk4r\") pod \"c2681f2d-ee52-4508-81f8-4e20289fb03d\" (UID: \"c2681f2d-ee52-4508-81f8-4e20289fb03d\") " Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.387444 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2681f2d-ee52-4508-81f8-4e20289fb03d-kube-api-access-8jk4r" (OuterVolumeSpecName: "kube-api-access-8jk4r") pod "c2681f2d-ee52-4508-81f8-4e20289fb03d" (UID: "c2681f2d-ee52-4508-81f8-4e20289fb03d"). InnerVolumeSpecName "kube-api-access-8jk4r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.477593 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jk4r\" (UniqueName: \"kubernetes.io/projected/c2681f2d-ee52-4508-81f8-4e20289fb03d-kube-api-access-8jk4r\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.579528 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.588055 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") pod \"swift-storage-0\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") " pod="openstack/swift-storage-0" Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.808019 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.929118 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-a940-account-create-wq96v" Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.929170 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a940-account-create-wq96v" event={"ID":"c2681f2d-ee52-4508-81f8-4e20289fb03d","Type":"ContainerDied","Data":"a3661882f2c65c00811f6a1a52efbc43f9c24c00e907d67b4b18b64f2dc6194e"} Oct 06 13:56:13 crc kubenswrapper[4757]: I1006 13:56:13.929569 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3661882f2c65c00811f6a1a52efbc43f9c24c00e907d67b4b18b64f2dc6194e" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.336436 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.450818 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Oct 06 13:56:14 crc kubenswrapper[4757]: W1006 13:56:14.461445 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf3caac3_9cc6_43d2_a3e6_2e8c02cb9c0e.slice/crio-363e1baa670fcb46278dc06e1f0ddf747b310ed231897f327adc82e7bb2771b2 WatchSource:0}: Error finding container 363e1baa670fcb46278dc06e1f0ddf747b310ed231897f327adc82e7bb2771b2: Status 404 returned error can't find the container with id 363e1baa670fcb46278dc06e1f0ddf747b310ed231897f327adc82e7bb2771b2 Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.475932 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-2941-account-create-7cmrj"] Oct 06 13:56:14 crc kubenswrapper[4757]: E1006 13:56:14.476694 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2681f2d-ee52-4508-81f8-4e20289fb03d" containerName="mariadb-account-create" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.476714 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2681f2d-ee52-4508-81f8-4e20289fb03d" containerName="mariadb-account-create" Oct 06 13:56:14 crc kubenswrapper[4757]: E1006 13:56:14.476750 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68bdda02-206a-460a-b6a9-7ab492a1f518" containerName="swift-ring-rebalance" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.476760 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="68bdda02-206a-460a-b6a9-7ab492a1f518" containerName="swift-ring-rebalance" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.477142 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2681f2d-ee52-4508-81f8-4e20289fb03d" containerName="mariadb-account-create" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.477167 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="68bdda02-206a-460a-b6a9-7ab492a1f518" containerName="swift-ring-rebalance" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.478308 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-2941-account-create-7cmrj" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.483412 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.499955 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-ring-data-devices\") pod \"68bdda02-206a-460a-b6a9-7ab492a1f518\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.500657 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-swiftconf\") pod \"68bdda02-206a-460a-b6a9-7ab492a1f518\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.500740 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/68bdda02-206a-460a-b6a9-7ab492a1f518-etc-swift\") pod \"68bdda02-206a-460a-b6a9-7ab492a1f518\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.500815 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-scripts\") pod \"68bdda02-206a-460a-b6a9-7ab492a1f518\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.500871 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-combined-ca-bundle\") pod \"68bdda02-206a-460a-b6a9-7ab492a1f518\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.500902 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-dispersionconf\") pod \"68bdda02-206a-460a-b6a9-7ab492a1f518\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.501750 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64ltx\" (UniqueName: \"kubernetes.io/projected/68bdda02-206a-460a-b6a9-7ab492a1f518-kube-api-access-64ltx\") pod \"68bdda02-206a-460a-b6a9-7ab492a1f518\" (UID: \"68bdda02-206a-460a-b6a9-7ab492a1f518\") " Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.502087 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "68bdda02-206a-460a-b6a9-7ab492a1f518" (UID: "68bdda02-206a-460a-b6a9-7ab492a1f518"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.502535 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gptgq\" (UniqueName: \"kubernetes.io/projected/e0f6378b-8aab-41e0-a041-77d6eaa303b7-kube-api-access-gptgq\") pod \"keystone-2941-account-create-7cmrj\" (UID: \"e0f6378b-8aab-41e0-a041-77d6eaa303b7\") " pod="openstack/keystone-2941-account-create-7cmrj" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.503227 4757 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.503262 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68bdda02-206a-460a-b6a9-7ab492a1f518-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "68bdda02-206a-460a-b6a9-7ab492a1f518" (UID: "68bdda02-206a-460a-b6a9-7ab492a1f518"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.507819 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68bdda02-206a-460a-b6a9-7ab492a1f518-kube-api-access-64ltx" (OuterVolumeSpecName: "kube-api-access-64ltx") pod "68bdda02-206a-460a-b6a9-7ab492a1f518" (UID: "68bdda02-206a-460a-b6a9-7ab492a1f518"). InnerVolumeSpecName "kube-api-access-64ltx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.509460 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-2941-account-create-7cmrj"] Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.515239 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "68bdda02-206a-460a-b6a9-7ab492a1f518" (UID: "68bdda02-206a-460a-b6a9-7ab492a1f518"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.522782 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-scripts" (OuterVolumeSpecName: "scripts") pod "68bdda02-206a-460a-b6a9-7ab492a1f518" (UID: "68bdda02-206a-460a-b6a9-7ab492a1f518"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.535730 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "68bdda02-206a-460a-b6a9-7ab492a1f518" (UID: "68bdda02-206a-460a-b6a9-7ab492a1f518"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.540848 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68bdda02-206a-460a-b6a9-7ab492a1f518" (UID: "68bdda02-206a-460a-b6a9-7ab492a1f518"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.604425 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gptgq\" (UniqueName: \"kubernetes.io/projected/e0f6378b-8aab-41e0-a041-77d6eaa303b7-kube-api-access-gptgq\") pod \"keystone-2941-account-create-7cmrj\" (UID: \"e0f6378b-8aab-41e0-a041-77d6eaa303b7\") " pod="openstack/keystone-2941-account-create-7cmrj" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.604545 4757 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.604558 4757 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/68bdda02-206a-460a-b6a9-7ab492a1f518-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.604566 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68bdda02-206a-460a-b6a9-7ab492a1f518-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.604575 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.604585 4757 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/68bdda02-206a-460a-b6a9-7ab492a1f518-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.604595 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64ltx\" (UniqueName: \"kubernetes.io/projected/68bdda02-206a-460a-b6a9-7ab492a1f518-kube-api-access-64ltx\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.621579 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gptgq\" (UniqueName: \"kubernetes.io/projected/e0f6378b-8aab-41e0-a041-77d6eaa303b7-kube-api-access-gptgq\") pod \"keystone-2941-account-create-7cmrj\" (UID: \"e0f6378b-8aab-41e0-a041-77d6eaa303b7\") " pod="openstack/keystone-2941-account-create-7cmrj" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.768981 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5c8d-account-create-q48sg"] Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.771279 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5c8d-account-create-q48sg" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.774944 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.776527 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5c8d-account-create-q48sg"] Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.844004 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-2941-account-create-7cmrj" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.911059 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jksz\" (UniqueName: \"kubernetes.io/projected/fcfaae90-f485-480d-9736-cbdbbb0e4ffa-kube-api-access-8jksz\") pod \"placement-5c8d-account-create-q48sg\" (UID: \"fcfaae90-f485-480d-9736-cbdbbb0e4ffa\") " pod="openstack/placement-5c8d-account-create-q48sg" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.943490 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-vpghx" event={"ID":"68bdda02-206a-460a-b6a9-7ab492a1f518","Type":"ContainerDied","Data":"7b7adfbac8ded7cc1a93ea8e6b3d34f55b9321152e1ea886f60a246b4dbe6991"} Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.943534 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b7adfbac8ded7cc1a93ea8e6b3d34f55b9321152e1ea886f60a246b4dbe6991" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.943527 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-vpghx" Oct 06 13:56:14 crc kubenswrapper[4757]: I1006 13:56:14.945582 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"363e1baa670fcb46278dc06e1f0ddf747b310ed231897f327adc82e7bb2771b2"} Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.012376 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jksz\" (UniqueName: \"kubernetes.io/projected/fcfaae90-f485-480d-9736-cbdbbb0e4ffa-kube-api-access-8jksz\") pod \"placement-5c8d-account-create-q48sg\" (UID: \"fcfaae90-f485-480d-9736-cbdbbb0e4ffa\") " pod="openstack/placement-5c8d-account-create-q48sg" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.036622 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jksz\" (UniqueName: \"kubernetes.io/projected/fcfaae90-f485-480d-9736-cbdbbb0e4ffa-kube-api-access-8jksz\") pod \"placement-5c8d-account-create-q48sg\" (UID: \"fcfaae90-f485-480d-9736-cbdbbb0e4ffa\") " pod="openstack/placement-5c8d-account-create-q48sg" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.096212 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5c8d-account-create-q48sg" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.210064 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-bswtq"] Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.211187 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.213071 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.214026 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-ksjg8" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.214909 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-db-sync-config-data\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.214983 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-config-data\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.215006 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8zmv\" (UniqueName: \"kubernetes.io/projected/b257515d-a807-4a28-8b28-0e6390aa9d42-kube-api-access-k8zmv\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.215020 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-combined-ca-bundle\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.218969 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-bswtq"] Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.320283 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-db-sync-config-data\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.320704 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-config-data\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.320736 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8zmv\" (UniqueName: \"kubernetes.io/projected/b257515d-a807-4a28-8b28-0e6390aa9d42-kube-api-access-k8zmv\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.320760 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-combined-ca-bundle\") pod 
\"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.325844 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-config-data\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.332570 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-2941-account-create-7cmrj"] Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.336319 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-combined-ca-bundle\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.341874 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-db-sync-config-data\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.341880 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8zmv\" (UniqueName: \"kubernetes.io/projected/b257515d-a807-4a28-8b28-0e6390aa9d42-kube-api-access-k8zmv\") pod \"glance-db-sync-bswtq\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.539947 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.569934 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5c8d-account-create-q48sg"] Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.956353 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2941-account-create-7cmrj" event={"ID":"e0f6378b-8aab-41e0-a041-77d6eaa303b7","Type":"ContainerStarted","Data":"275da12e97ad2918f8f513a63a2e9ec2e48a957a2487ef120a3e00be65430cf8"} Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.956610 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2941-account-create-7cmrj" event={"ID":"e0f6378b-8aab-41e0-a041-77d6eaa303b7","Type":"ContainerStarted","Data":"e240bffcbf36ed80ddc72ed571d9e64645a9890eebd48c1e1561347ecba32349"} Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.957574 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5c8d-account-create-q48sg" event={"ID":"fcfaae90-f485-480d-9736-cbdbbb0e4ffa","Type":"ContainerStarted","Data":"a3d7e27eebbc4b60ce6095c1c6c4522a56191ad028a762e172e625e2020092be"} Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.957594 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5c8d-account-create-q48sg" event={"ID":"fcfaae90-f485-480d-9736-cbdbbb0e4ffa","Type":"ContainerStarted","Data":"6e5a1219a9cffc1c7fbe36e3739827d35d4910815804d4ffcccd8fdeb8eafa3b"} Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.959731 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a"} Oct 06 13:56:15 crc kubenswrapper[4757]: I1006 13:56:15.986547 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5c8d-account-create-q48sg" podStartSLOduration=1.9865262860000001 podStartE2EDuration="1.986526286s" podCreationTimestamp="2025-10-06 13:56:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:15.97951626 +0000 UTC m=+1064.476834797" watchObservedRunningTime="2025-10-06 13:56:15.986526286 +0000 UTC m=+1064.483844823" Oct 06 13:56:16 crc kubenswrapper[4757]: I1006 13:56:16.144735 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-bswtq"] Oct 06 13:56:16 crc kubenswrapper[4757]: I1006 13:56:16.620159 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-kn7bx" podUID="0efda247-fa18-49db-a37d-1dd28d999ed7" containerName="ovn-controller" probeResult="failure" output=< Oct 06 13:56:16 crc kubenswrapper[4757]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 06 13:56:16 crc kubenswrapper[4757]: > Oct 06 13:56:16 crc kubenswrapper[4757]: I1006 13:56:16.637952 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:56:16 crc kubenswrapper[4757]: I1006 13:56:16.648860 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-fqwwx" Oct 06 13:56:16 crc kubenswrapper[4757]: I1006 13:56:16.862872 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-kn7bx-config-s72zd"] Oct 06 13:56:16 crc 
kubenswrapper[4757]: I1006 13:56:16.864413 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:16 crc kubenswrapper[4757]: I1006 13:56:16.868791 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 06 13:56:16 crc kubenswrapper[4757]: I1006 13:56:16.873308 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kn7bx-config-s72zd"] Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.000811 4757 generic.go:334] "Generic (PLEG): container finished" podID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" containerID="60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379" exitCode=0 Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.000865 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7","Type":"ContainerDied","Data":"60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379"} Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.004112 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5c8d-account-create-q48sg" event={"ID":"fcfaae90-f485-480d-9736-cbdbbb0e4ffa","Type":"ContainerDied","Data":"a3d7e27eebbc4b60ce6095c1c6c4522a56191ad028a762e172e625e2020092be"} Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.004288 4757 generic.go:334] "Generic (PLEG): container finished" podID="fcfaae90-f485-480d-9736-cbdbbb0e4ffa" containerID="a3d7e27eebbc4b60ce6095c1c6c4522a56191ad028a762e172e625e2020092be" exitCode=0 Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.006817 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e"} Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.006855 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92"} Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.006881 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb"} Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.010330 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bswtq" event={"ID":"b257515d-a807-4a28-8b28-0e6390aa9d42","Type":"ContainerStarted","Data":"d7718981c31b8f0fcedf260f4db1b1e4a3202fc0e3bb0e6bf83ac1443cd9be4f"} Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.011915 4757 generic.go:334] "Generic (PLEG): container finished" podID="e0f6378b-8aab-41e0-a041-77d6eaa303b7" containerID="275da12e97ad2918f8f513a63a2e9ec2e48a957a2487ef120a3e00be65430cf8" exitCode=0 Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.011968 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2941-account-create-7cmrj" event={"ID":"e0f6378b-8aab-41e0-a041-77d6eaa303b7","Type":"ContainerDied","Data":"275da12e97ad2918f8f513a63a2e9ec2e48a957a2487ef120a3e00be65430cf8"} Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.016126 4757 generic.go:334] "Generic (PLEG): container finished" 
podID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" containerID="42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4" exitCode=0 Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.016192 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61","Type":"ContainerDied","Data":"42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4"} Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.064463 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-additional-scripts\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.065241 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-log-ovn\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.065299 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.065359 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run-ovn\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.065379 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-scripts\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.065534 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfdbg\" (UniqueName: \"kubernetes.io/projected/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-kube-api-access-vfdbg\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.169072 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-additional-scripts\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.169175 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-log-ovn\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.169208 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.169292 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run-ovn\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.169311 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-scripts\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.169332 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfdbg\" (UniqueName: \"kubernetes.io/projected/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-kube-api-access-vfdbg\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.169732 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.170908 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run-ovn\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.171084 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-additional-scripts\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.178352 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-log-ovn\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.186801 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-scripts\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.207439 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfdbg\" (UniqueName: \"kubernetes.io/projected/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-kube-api-access-vfdbg\") pod \"ovn-controller-kn7bx-config-s72zd\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.379523 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-2941-account-create-7cmrj" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.478323 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gptgq\" (UniqueName: \"kubernetes.io/projected/e0f6378b-8aab-41e0-a041-77d6eaa303b7-kube-api-access-gptgq\") pod \"e0f6378b-8aab-41e0-a041-77d6eaa303b7\" (UID: \"e0f6378b-8aab-41e0-a041-77d6eaa303b7\") " Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.480704 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.482398 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0f6378b-8aab-41e0-a041-77d6eaa303b7-kube-api-access-gptgq" (OuterVolumeSpecName: "kube-api-access-gptgq") pod "e0f6378b-8aab-41e0-a041-77d6eaa303b7" (UID: "e0f6378b-8aab-41e0-a041-77d6eaa303b7"). InnerVolumeSpecName "kube-api-access-gptgq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.580696 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gptgq\" (UniqueName: \"kubernetes.io/projected/e0f6378b-8aab-41e0-a041-77d6eaa303b7-kube-api-access-gptgq\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:17 crc kubenswrapper[4757]: I1006 13:56:17.928984 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-kn7bx-config-s72zd"] Oct 06 13:56:17 crc kubenswrapper[4757]: W1006 13:56:17.957896 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5df8c59f_20e4_4d9b_9c2d_e1192cd8d3d1.slice/crio-8e8a4fde4cc15419ab56b7eecd4c44b1fa0c3e7004728ef44877f0f2f845db8c WatchSource:0}: Error finding container 8e8a4fde4cc15419ab56b7eecd4c44b1fa0c3e7004728ef44877f0f2f845db8c: Status 404 returned error can't find the container with id 8e8a4fde4cc15419ab56b7eecd4c44b1fa0c3e7004728ef44877f0f2f845db8c Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.034810 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7","Type":"ContainerStarted","Data":"1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7"} Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.035117 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.038263 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kn7bx-config-s72zd" event={"ID":"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1","Type":"ContainerStarted","Data":"8e8a4fde4cc15419ab56b7eecd4c44b1fa0c3e7004728ef44877f0f2f845db8c"} Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.041002 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-2941-account-create-7cmrj" event={"ID":"e0f6378b-8aab-41e0-a041-77d6eaa303b7","Type":"ContainerDied","Data":"e240bffcbf36ed80ddc72ed571d9e64645a9890eebd48c1e1561347ecba32349"} Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.041030 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e240bffcbf36ed80ddc72ed571d9e64645a9890eebd48c1e1561347ecba32349" Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.041075 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-2941-account-create-7cmrj" Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.061135 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=50.22582231 podStartE2EDuration="58.06111851s" podCreationTimestamp="2025-10-06 13:55:20 +0000 UTC" firstStartedPulling="2025-10-06 13:55:33.437308133 +0000 UTC m=+1021.934626670" lastFinishedPulling="2025-10-06 13:55:41.272604333 +0000 UTC m=+1029.769922870" observedRunningTime="2025-10-06 13:56:18.060493061 +0000 UTC m=+1066.557811598" watchObservedRunningTime="2025-10-06 13:56:18.06111851 +0000 UTC m=+1066.558437047" Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.063165 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61","Type":"ContainerStarted","Data":"a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b"} Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.063594 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.107183 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=49.450100574 podStartE2EDuration="58.107158557s" podCreationTimestamp="2025-10-06 13:55:20 +0000 UTC" firstStartedPulling="2025-10-06 13:55:32.613177906 +0000 UTC m=+1021.110496443" lastFinishedPulling="2025-10-06 13:55:41.270235889 +0000 UTC m=+1029.767554426" observedRunningTime="2025-10-06 13:56:18.096665214 +0000 UTC m=+1066.593983781" watchObservedRunningTime="2025-10-06 13:56:18.107158557 +0000 UTC m=+1066.604477114" Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.430213 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5c8d-account-create-q48sg" Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.500725 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jksz\" (UniqueName: \"kubernetes.io/projected/fcfaae90-f485-480d-9736-cbdbbb0e4ffa-kube-api-access-8jksz\") pod \"fcfaae90-f485-480d-9736-cbdbbb0e4ffa\" (UID: \"fcfaae90-f485-480d-9736-cbdbbb0e4ffa\") " Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.507301 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcfaae90-f485-480d-9736-cbdbbb0e4ffa-kube-api-access-8jksz" (OuterVolumeSpecName: "kube-api-access-8jksz") pod "fcfaae90-f485-480d-9736-cbdbbb0e4ffa" (UID: "fcfaae90-f485-480d-9736-cbdbbb0e4ffa"). InnerVolumeSpecName "kube-api-access-8jksz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:18 crc kubenswrapper[4757]: I1006 13:56:18.602610 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jksz\" (UniqueName: \"kubernetes.io/projected/fcfaae90-f485-480d-9736-cbdbbb0e4ffa-kube-api-access-8jksz\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:19 crc kubenswrapper[4757]: I1006 13:56:19.073527 4757 generic.go:334] "Generic (PLEG): container finished" podID="5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" containerID="5b260f81b1e0965e466a896729e6f3b179fbab40f5ade6c74a4f99ade063546c" exitCode=0 Oct 06 13:56:19 crc kubenswrapper[4757]: I1006 13:56:19.073645 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kn7bx-config-s72zd" event={"ID":"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1","Type":"ContainerDied","Data":"5b260f81b1e0965e466a896729e6f3b179fbab40f5ade6c74a4f99ade063546c"} Oct 06 13:56:19 crc kubenswrapper[4757]: I1006 13:56:19.076158 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5c8d-account-create-q48sg" event={"ID":"fcfaae90-f485-480d-9736-cbdbbb0e4ffa","Type":"ContainerDied","Data":"6e5a1219a9cffc1c7fbe36e3739827d35d4910815804d4ffcccd8fdeb8eafa3b"} Oct 06 13:56:19 crc kubenswrapper[4757]: I1006 13:56:19.076190 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e5a1219a9cffc1c7fbe36e3739827d35d4910815804d4ffcccd8fdeb8eafa3b" Oct 06 13:56:19 crc kubenswrapper[4757]: I1006 13:56:19.076320 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5c8d-account-create-q48sg" Oct 06 13:56:19 crc kubenswrapper[4757]: I1006 13:56:19.082083 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2"} Oct 06 13:56:19 crc kubenswrapper[4757]: I1006 13:56:19.082146 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c"} Oct 06 13:56:19 crc kubenswrapper[4757]: I1006 13:56:19.082165 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1"} Oct 06 13:56:19 crc kubenswrapper[4757]: I1006 13:56:19.082177 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e"} Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.098929 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4"} Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.423664 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.538716 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-additional-scripts\") pod \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.538806 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-scripts\") pod \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.538849 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run-ovn\") pod \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.538906 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfdbg\" (UniqueName: \"kubernetes.io/projected/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-kube-api-access-vfdbg\") pod \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.538941 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run\") pod \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.538962 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-log-ovn\") pod \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\" (UID: \"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1\") " Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.539408 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" (UID: "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.539451 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" (UID: "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.539515 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" (UID: "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.539548 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run" (OuterVolumeSpecName: "var-run") pod "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" (UID: "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.539774 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-scripts" (OuterVolumeSpecName: "scripts") pod "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" (UID: "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.569212 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-kube-api-access-vfdbg" (OuterVolumeSpecName: "kube-api-access-vfdbg") pod "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" (UID: "5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1"). InnerVolumeSpecName "kube-api-access-vfdbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.640673 4757 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.640722 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.640733 4757 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.640745 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfdbg\" (UniqueName: \"kubernetes.io/projected/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-kube-api-access-vfdbg\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.640755 4757 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:20 crc kubenswrapper[4757]: I1006 13:56:20.640780 4757 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1-var-run\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.115939 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kn7bx-config-s72zd" event={"ID":"5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1","Type":"ContainerDied","Data":"8e8a4fde4cc15419ab56b7eecd4c44b1fa0c3e7004728ef44877f0f2f845db8c"} Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.116232 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e8a4fde4cc15419ab56b7eecd4c44b1fa0c3e7004728ef44877f0f2f845db8c" Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.116245 4757 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kn7bx-config-s72zd" Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.131889 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e"} Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.131940 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e"} Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.131956 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4"} Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.131968 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1"} Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.131979 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636"} Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.524241 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kn7bx-config-s72zd"] Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.530129 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-kn7bx-config-s72zd"] Oct 06 13:56:21 crc kubenswrapper[4757]: I1006 13:56:21.639037 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-kn7bx" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.143908 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerStarted","Data":"a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b"} Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.207954 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.898856506 podStartE2EDuration="26.207932416s" podCreationTimestamp="2025-10-06 13:55:56 +0000 UTC" firstStartedPulling="2025-10-06 13:56:14.463618362 +0000 UTC m=+1062.960936919" lastFinishedPulling="2025-10-06 13:56:19.772694292 +0000 UTC m=+1068.270012829" observedRunningTime="2025-10-06 13:56:22.181541123 +0000 UTC m=+1070.678859680" watchObservedRunningTime="2025-10-06 13:56:22.207932416 +0000 UTC m=+1070.705250953" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.222458 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" path="/var/lib/kubelet/pods/5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1/volumes" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.459741 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fb487d997-ctdfh"] Oct 06 13:56:22 crc kubenswrapper[4757]: E1006 13:56:22.460046 4757 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcfaae90-f485-480d-9736-cbdbbb0e4ffa" containerName="mariadb-account-create" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.460059 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcfaae90-f485-480d-9736-cbdbbb0e4ffa" containerName="mariadb-account-create" Oct 06 13:56:22 crc kubenswrapper[4757]: E1006 13:56:22.460085 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0f6378b-8aab-41e0-a041-77d6eaa303b7" containerName="mariadb-account-create" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.460107 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0f6378b-8aab-41e0-a041-77d6eaa303b7" containerName="mariadb-account-create" Oct 06 13:56:22 crc kubenswrapper[4757]: E1006 13:56:22.460120 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" containerName="ovn-config" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.460128 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" containerName="ovn-config" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.460274 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0f6378b-8aab-41e0-a041-77d6eaa303b7" containerName="mariadb-account-create" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.460293 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="5df8c59f-20e4-4d9b-9c2d-e1192cd8d3d1" containerName="ovn-config" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.460313 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcfaae90-f485-480d-9736-cbdbbb0e4ffa" containerName="mariadb-account-create" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.461116 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.462848 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.480537 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fb487d997-ctdfh"] Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.578423 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-sb\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.578486 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69x4d\" (UniqueName: \"kubernetes.io/projected/71d18dba-d344-4779-9b18-cecbe20b22eb-kube-api-access-69x4d\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.578952 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-nb\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.579010 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-svc\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.579053 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-config\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.579146 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-swift-storage-0\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.680185 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-swift-storage-0\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.680265 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-sb\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: 
\"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.680300 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69x4d\" (UniqueName: \"kubernetes.io/projected/71d18dba-d344-4779-9b18-cecbe20b22eb-kube-api-access-69x4d\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.680415 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-nb\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.680440 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-svc\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.680465 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-config\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.681900 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-nb\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.682233 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-svc\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.682430 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-sb\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.682531 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-swift-storage-0\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.682701 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-config\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: 
I1006 13:56:22.702273 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69x4d\" (UniqueName: \"kubernetes.io/projected/71d18dba-d344-4779-9b18-cecbe20b22eb-kube-api-access-69x4d\") pod \"dnsmasq-dns-7fb487d997-ctdfh\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:22 crc kubenswrapper[4757]: I1006 13:56:22.777026 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:31 crc kubenswrapper[4757]: I1006 13:56:31.514268 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fb487d997-ctdfh"] Oct 06 13:56:31 crc kubenswrapper[4757]: W1006 13:56:31.567503 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71d18dba_d344_4779_9b18_cecbe20b22eb.slice/crio-a5efeb65cad4fc6e4765880259b7d437beb6f49fe28587d0e23fca2854f490dd WatchSource:0}: Error finding container a5efeb65cad4fc6e4765880259b7d437beb6f49fe28587d0e23fca2854f490dd: Status 404 returned error can't find the container with id a5efeb65cad4fc6e4765880259b7d437beb6f49fe28587d0e23fca2854f490dd Oct 06 13:56:31 crc kubenswrapper[4757]: I1006 13:56:31.669386 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 06 13:56:31 crc kubenswrapper[4757]: I1006 13:56:31.930660 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 06 13:56:31 crc kubenswrapper[4757]: I1006 13:56:31.935817 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-bptsp"] Oct 06 13:56:31 crc kubenswrapper[4757]: I1006 13:56:31.937621 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bptsp" Oct 06 13:56:31 crc kubenswrapper[4757]: I1006 13:56:31.946187 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bptsp"] Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.032581 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-6b2cq"] Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.033574 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-6b2cq" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.037677 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmn5h\" (UniqueName: \"kubernetes.io/projected/91433fac-000a-4c4c-bfc4-7f2e7c762483-kube-api-access-cmn5h\") pod \"cinder-db-create-bptsp\" (UID: \"91433fac-000a-4c4c-bfc4-7f2e7c762483\") " pod="openstack/cinder-db-create-bptsp" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.080676 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6b2cq"] Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.138740 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq7pz\" (UniqueName: \"kubernetes.io/projected/a0a0bfd7-a0ed-468e-9226-ae8f99e7b457-kube-api-access-pq7pz\") pod \"barbican-db-create-6b2cq\" (UID: \"a0a0bfd7-a0ed-468e-9226-ae8f99e7b457\") " pod="openstack/barbican-db-create-6b2cq" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.138933 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmn5h\" (UniqueName: \"kubernetes.io/projected/91433fac-000a-4c4c-bfc4-7f2e7c762483-kube-api-access-cmn5h\") pod \"cinder-db-create-bptsp\" (UID: \"91433fac-000a-4c4c-bfc4-7f2e7c762483\") " pod="openstack/cinder-db-create-bptsp" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.161559 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmn5h\" (UniqueName: \"kubernetes.io/projected/91433fac-000a-4c4c-bfc4-7f2e7c762483-kube-api-access-cmn5h\") pod \"cinder-db-create-bptsp\" (UID: \"91433fac-000a-4c4c-bfc4-7f2e7c762483\") " pod="openstack/cinder-db-create-bptsp" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.241231 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq7pz\" (UniqueName: \"kubernetes.io/projected/a0a0bfd7-a0ed-468e-9226-ae8f99e7b457-kube-api-access-pq7pz\") pod \"barbican-db-create-6b2cq\" (UID: \"a0a0bfd7-a0ed-468e-9226-ae8f99e7b457\") " pod="openstack/barbican-db-create-6b2cq" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.251694 4757 generic.go:334] "Generic (PLEG): container finished" podID="71d18dba-d344-4779-9b18-cecbe20b22eb" containerID="e8441672de141ceee66a56624b66a6758d58f2d762b07f2d4ef4caa54965e974" exitCode=0 Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.251737 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" event={"ID":"71d18dba-d344-4779-9b18-cecbe20b22eb","Type":"ContainerDied","Data":"e8441672de141ceee66a56624b66a6758d58f2d762b07f2d4ef4caa54965e974"} Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.251762 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" event={"ID":"71d18dba-d344-4779-9b18-cecbe20b22eb","Type":"ContainerStarted","Data":"a5efeb65cad4fc6e4765880259b7d437beb6f49fe28587d0e23fca2854f490dd"} Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.258551 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq7pz\" (UniqueName: \"kubernetes.io/projected/a0a0bfd7-a0ed-468e-9226-ae8f99e7b457-kube-api-access-pq7pz\") pod \"barbican-db-create-6b2cq\" (UID: \"a0a0bfd7-a0ed-468e-9226-ae8f99e7b457\") " pod="openstack/barbican-db-create-6b2cq" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.276153 
4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bptsp" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.347777 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-bpnp8"] Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.356086 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bpnp8" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.361486 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6b2cq" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.370850 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-bpnp8"] Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.427585 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-ms6bn"] Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.431010 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.443581 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-d9pz6" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.443937 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.444819 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.445302 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.445400 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kbxq\" (UniqueName: \"kubernetes.io/projected/4a8780a9-61ea-43c8-a052-2e853328cb11-kube-api-access-5kbxq\") pod \"neutron-db-create-bpnp8\" (UID: \"4a8780a9-61ea-43c8-a052-2e853328cb11\") " pod="openstack/neutron-db-create-bpnp8" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.519968 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-ms6bn"] Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.548322 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kbxq\" (UniqueName: \"kubernetes.io/projected/4a8780a9-61ea-43c8-a052-2e853328cb11-kube-api-access-5kbxq\") pod \"neutron-db-create-bpnp8\" (UID: \"4a8780a9-61ea-43c8-a052-2e853328cb11\") " pod="openstack/neutron-db-create-bpnp8" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.548368 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6pnq\" (UniqueName: \"kubernetes.io/projected/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-kube-api-access-p6pnq\") pod \"keystone-db-sync-ms6bn\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.548457 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-config-data\") pod \"keystone-db-sync-ms6bn\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 
crc kubenswrapper[4757]: I1006 13:56:32.548518 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-combined-ca-bundle\") pod \"keystone-db-sync-ms6bn\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.578345 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kbxq\" (UniqueName: \"kubernetes.io/projected/4a8780a9-61ea-43c8-a052-2e853328cb11-kube-api-access-5kbxq\") pod \"neutron-db-create-bpnp8\" (UID: \"4a8780a9-61ea-43c8-a052-2e853328cb11\") " pod="openstack/neutron-db-create-bpnp8" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.649434 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6pnq\" (UniqueName: \"kubernetes.io/projected/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-kube-api-access-p6pnq\") pod \"keystone-db-sync-ms6bn\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.649523 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-config-data\") pod \"keystone-db-sync-ms6bn\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.649576 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-combined-ca-bundle\") pod \"keystone-db-sync-ms6bn\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.653807 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-combined-ca-bundle\") pod \"keystone-db-sync-ms6bn\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.657968 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-config-data\") pod \"keystone-db-sync-ms6bn\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.676657 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6pnq\" (UniqueName: \"kubernetes.io/projected/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-kube-api-access-p6pnq\") pod \"keystone-db-sync-ms6bn\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.692923 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-bptsp"] Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.766280 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6b2cq"] Oct 06 13:56:32 crc kubenswrapper[4757]: W1006 13:56:32.770624 4757 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0a0bfd7_a0ed_468e_9226_ae8f99e7b457.slice/crio-7ce26653b7a8887935e5cee11a2b508716c78181db456448c368743f1284978d WatchSource:0}: Error finding container 7ce26653b7a8887935e5cee11a2b508716c78181db456448c368743f1284978d: Status 404 returned error can't find the container with id 7ce26653b7a8887935e5cee11a2b508716c78181db456448c368743f1284978d Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.803132 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bpnp8" Oct 06 13:56:32 crc kubenswrapper[4757]: I1006 13:56:32.839301 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.272661 4757 generic.go:334] "Generic (PLEG): container finished" podID="91433fac-000a-4c4c-bfc4-7f2e7c762483" containerID="0d642b8e9315ccd4eb8cc69149282e3a4cc291685b4614e00eb8322c6b1d9bc6" exitCode=0 Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.272722 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bptsp" event={"ID":"91433fac-000a-4c4c-bfc4-7f2e7c762483","Type":"ContainerDied","Data":"0d642b8e9315ccd4eb8cc69149282e3a4cc291685b4614e00eb8322c6b1d9bc6"} Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.272746 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bptsp" event={"ID":"91433fac-000a-4c4c-bfc4-7f2e7c762483","Type":"ContainerStarted","Data":"0784247e3f491bd3a9788f3127ecd8e5b08f2605d629a6dfcbcf4729a710557f"} Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.278026 4757 generic.go:334] "Generic (PLEG): container finished" podID="a0a0bfd7-a0ed-468e-9226-ae8f99e7b457" containerID="c0a49243055a7805559b9f7f3b0ff4636eef387948f20f859087ff9372d37c1b" exitCode=0 Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.278147 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6b2cq" event={"ID":"a0a0bfd7-a0ed-468e-9226-ae8f99e7b457","Type":"ContainerDied","Data":"c0a49243055a7805559b9f7f3b0ff4636eef387948f20f859087ff9372d37c1b"} Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.278179 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6b2cq" event={"ID":"a0a0bfd7-a0ed-468e-9226-ae8f99e7b457","Type":"ContainerStarted","Data":"7ce26653b7a8887935e5cee11a2b508716c78181db456448c368743f1284978d"} Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.280050 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" event={"ID":"71d18dba-d344-4779-9b18-cecbe20b22eb","Type":"ContainerStarted","Data":"5606e8efd9a2437fd5572db5e168149262d1347c44f3fb369868f75a0a20065a"} Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.280687 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.289392 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bswtq" event={"ID":"b257515d-a807-4a28-8b28-0e6390aa9d42","Type":"ContainerStarted","Data":"0fd05490d9790b9d51fe787c6d786ad598ea8b54f12c45e654e901227b207bd1"} Oct 06 13:56:33 crc kubenswrapper[4757]: W1006 13:56:33.313668 4757 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a8780a9_61ea_43c8_a052_2e853328cb11.slice/crio-b0d96dd30a55e39a8f396fdc506f534c8b43fcbad0d865fb624e2e580547fcc4 WatchSource:0}: Error finding container b0d96dd30a55e39a8f396fdc506f534c8b43fcbad0d865fb624e2e580547fcc4: Status 404 returned error can't find the container with id b0d96dd30a55e39a8f396fdc506f534c8b43fcbad0d865fb624e2e580547fcc4 Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.329573 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-bpnp8"] Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.333046 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" podStartSLOduration=11.333027969 podStartE2EDuration="11.333027969s" podCreationTimestamp="2025-10-06 13:56:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:33.325726373 +0000 UTC m=+1081.823044910" watchObservedRunningTime="2025-10-06 13:56:33.333027969 +0000 UTC m=+1081.830346506" Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.348180 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-bswtq" podStartSLOduration=2.906222532 podStartE2EDuration="18.348164814s" podCreationTimestamp="2025-10-06 13:56:15 +0000 UTC" firstStartedPulling="2025-10-06 13:56:16.175294446 +0000 UTC m=+1064.672613003" lastFinishedPulling="2025-10-06 13:56:31.617236738 +0000 UTC m=+1080.114555285" observedRunningTime="2025-10-06 13:56:33.346546204 +0000 UTC m=+1081.843864751" watchObservedRunningTime="2025-10-06 13:56:33.348164814 +0000 UTC m=+1081.845483351" Oct 06 13:56:33 crc kubenswrapper[4757]: I1006 13:56:33.414744 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-ms6bn"] Oct 06 13:56:33 crc kubenswrapper[4757]: W1006 13:56:33.418309 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode243d6cc_b188_4dbb_a95e_21b9e6d52eeb.slice/crio-2348c8261ee42310eae93debff80ea7266134a6c35865280afc42eec7a0793e0 WatchSource:0}: Error finding container 2348c8261ee42310eae93debff80ea7266134a6c35865280afc42eec7a0793e0: Status 404 returned error can't find the container with id 2348c8261ee42310eae93debff80ea7266134a6c35865280afc42eec7a0793e0 Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.299740 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ms6bn" event={"ID":"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb","Type":"ContainerStarted","Data":"2348c8261ee42310eae93debff80ea7266134a6c35865280afc42eec7a0793e0"} Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.303047 4757 generic.go:334] "Generic (PLEG): container finished" podID="4a8780a9-61ea-43c8-a052-2e853328cb11" containerID="0c00dffa5260f96cc6b67586125cd512b47e1b2350ec1a5a68e7c643721a29c7" exitCode=0 Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.303244 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bpnp8" event={"ID":"4a8780a9-61ea-43c8-a052-2e853328cb11","Type":"ContainerDied","Data":"0c00dffa5260f96cc6b67586125cd512b47e1b2350ec1a5a68e7c643721a29c7"} Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.303304 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bpnp8" 
event={"ID":"4a8780a9-61ea-43c8-a052-2e853328cb11","Type":"ContainerStarted","Data":"b0d96dd30a55e39a8f396fdc506f534c8b43fcbad0d865fb624e2e580547fcc4"} Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.716241 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bptsp" Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.720824 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6b2cq" Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.895725 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmn5h\" (UniqueName: \"kubernetes.io/projected/91433fac-000a-4c4c-bfc4-7f2e7c762483-kube-api-access-cmn5h\") pod \"91433fac-000a-4c4c-bfc4-7f2e7c762483\" (UID: \"91433fac-000a-4c4c-bfc4-7f2e7c762483\") " Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.896148 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq7pz\" (UniqueName: \"kubernetes.io/projected/a0a0bfd7-a0ed-468e-9226-ae8f99e7b457-kube-api-access-pq7pz\") pod \"a0a0bfd7-a0ed-468e-9226-ae8f99e7b457\" (UID: \"a0a0bfd7-a0ed-468e-9226-ae8f99e7b457\") " Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.904451 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91433fac-000a-4c4c-bfc4-7f2e7c762483-kube-api-access-cmn5h" (OuterVolumeSpecName: "kube-api-access-cmn5h") pod "91433fac-000a-4c4c-bfc4-7f2e7c762483" (UID: "91433fac-000a-4c4c-bfc4-7f2e7c762483"). InnerVolumeSpecName "kube-api-access-cmn5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.926292 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0a0bfd7-a0ed-468e-9226-ae8f99e7b457-kube-api-access-pq7pz" (OuterVolumeSpecName: "kube-api-access-pq7pz") pod "a0a0bfd7-a0ed-468e-9226-ae8f99e7b457" (UID: "a0a0bfd7-a0ed-468e-9226-ae8f99e7b457"). InnerVolumeSpecName "kube-api-access-pq7pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.998679 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq7pz\" (UniqueName: \"kubernetes.io/projected/a0a0bfd7-a0ed-468e-9226-ae8f99e7b457-kube-api-access-pq7pz\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:34 crc kubenswrapper[4757]: I1006 13:56:34.998726 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmn5h\" (UniqueName: \"kubernetes.io/projected/91433fac-000a-4c4c-bfc4-7f2e7c762483-kube-api-access-cmn5h\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.327213 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-6b2cq" Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.327916 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6b2cq" event={"ID":"a0a0bfd7-a0ed-468e-9226-ae8f99e7b457","Type":"ContainerDied","Data":"7ce26653b7a8887935e5cee11a2b508716c78181db456448c368743f1284978d"} Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.327967 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ce26653b7a8887935e5cee11a2b508716c78181db456448c368743f1284978d" Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.332792 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-bptsp" Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.332967 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-bptsp" event={"ID":"91433fac-000a-4c4c-bfc4-7f2e7c762483","Type":"ContainerDied","Data":"0784247e3f491bd3a9788f3127ecd8e5b08f2605d629a6dfcbcf4729a710557f"} Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.333106 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0784247e3f491bd3a9788f3127ecd8e5b08f2605d629a6dfcbcf4729a710557f" Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.570663 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-bpnp8" Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.713806 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kbxq\" (UniqueName: \"kubernetes.io/projected/4a8780a9-61ea-43c8-a052-2e853328cb11-kube-api-access-5kbxq\") pod \"4a8780a9-61ea-43c8-a052-2e853328cb11\" (UID: \"4a8780a9-61ea-43c8-a052-2e853328cb11\") " Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.717355 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a8780a9-61ea-43c8-a052-2e853328cb11-kube-api-access-5kbxq" (OuterVolumeSpecName: "kube-api-access-5kbxq") pod "4a8780a9-61ea-43c8-a052-2e853328cb11" (UID: "4a8780a9-61ea-43c8-a052-2e853328cb11"). InnerVolumeSpecName "kube-api-access-5kbxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:35 crc kubenswrapper[4757]: I1006 13:56:35.816296 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kbxq\" (UniqueName: \"kubernetes.io/projected/4a8780a9-61ea-43c8-a052-2e853328cb11-kube-api-access-5kbxq\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:36 crc kubenswrapper[4757]: I1006 13:56:36.342280 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-bpnp8" event={"ID":"4a8780a9-61ea-43c8-a052-2e853328cb11","Type":"ContainerDied","Data":"b0d96dd30a55e39a8f396fdc506f534c8b43fcbad0d865fb624e2e580547fcc4"} Oct 06 13:56:36 crc kubenswrapper[4757]: I1006 13:56:36.342321 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0d96dd30a55e39a8f396fdc506f534c8b43fcbad0d865fb624e2e580547fcc4" Oct 06 13:56:36 crc kubenswrapper[4757]: I1006 13:56:36.342374 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-bpnp8" Oct 06 13:56:37 crc kubenswrapper[4757]: I1006 13:56:37.780245 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:37 crc kubenswrapper[4757]: I1006 13:56:37.851745 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647b75b6c-c9gqf"] Oct 06 13:56:37 crc kubenswrapper[4757]: I1006 13:56:37.852053 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" podUID="7cd83436-1c2f-4506-8505-c21002020f08" containerName="dnsmasq-dns" containerID="cri-o://376c98aafdd1481ec49271aaf4e5afaed826cd382cffa7ebf5b346dddaef55eb" gracePeriod=10 Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.395257 4757 generic.go:334] "Generic (PLEG): container finished" podID="7cd83436-1c2f-4506-8505-c21002020f08" containerID="376c98aafdd1481ec49271aaf4e5afaed826cd382cffa7ebf5b346dddaef55eb" exitCode=0 Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.395563 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" event={"ID":"7cd83436-1c2f-4506-8505-c21002020f08","Type":"ContainerDied","Data":"376c98aafdd1481ec49271aaf4e5afaed826cd382cffa7ebf5b346dddaef55eb"} Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.476203 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.590649 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgnnj\" (UniqueName: \"kubernetes.io/projected/7cd83436-1c2f-4506-8505-c21002020f08-kube-api-access-rgnnj\") pod \"7cd83436-1c2f-4506-8505-c21002020f08\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.590985 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-sb\") pod \"7cd83436-1c2f-4506-8505-c21002020f08\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.591617 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-dns-svc\") pod \"7cd83436-1c2f-4506-8505-c21002020f08\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.591735 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-nb\") pod \"7cd83436-1c2f-4506-8505-c21002020f08\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.591757 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-config\") pod \"7cd83436-1c2f-4506-8505-c21002020f08\" (UID: \"7cd83436-1c2f-4506-8505-c21002020f08\") " Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.595862 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cd83436-1c2f-4506-8505-c21002020f08-kube-api-access-rgnnj" (OuterVolumeSpecName: "kube-api-access-rgnnj") pod 
"7cd83436-1c2f-4506-8505-c21002020f08" (UID: "7cd83436-1c2f-4506-8505-c21002020f08"). InnerVolumeSpecName "kube-api-access-rgnnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.631460 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-config" (OuterVolumeSpecName: "config") pod "7cd83436-1c2f-4506-8505-c21002020f08" (UID: "7cd83436-1c2f-4506-8505-c21002020f08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.632440 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7cd83436-1c2f-4506-8505-c21002020f08" (UID: "7cd83436-1c2f-4506-8505-c21002020f08"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.634845 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7cd83436-1c2f-4506-8505-c21002020f08" (UID: "7cd83436-1c2f-4506-8505-c21002020f08"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.638867 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7cd83436-1c2f-4506-8505-c21002020f08" (UID: "7cd83436-1c2f-4506-8505-c21002020f08"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.692845 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.692872 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.692882 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgnnj\" (UniqueName: \"kubernetes.io/projected/7cd83436-1c2f-4506-8505-c21002020f08-kube-api-access-rgnnj\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.692892 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:38 crc kubenswrapper[4757]: I1006 13:56:38.692900 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7cd83436-1c2f-4506-8505-c21002020f08-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:39 crc kubenswrapper[4757]: I1006 13:56:39.405714 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ms6bn" event={"ID":"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb","Type":"ContainerStarted","Data":"fe40956a831a0da9019f8af107b4834c649a88bece271363e82d0b6d1b5ddd22"} Oct 06 13:56:39 crc kubenswrapper[4757]: I1006 13:56:39.408158 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" event={"ID":"7cd83436-1c2f-4506-8505-c21002020f08","Type":"ContainerDied","Data":"37ddbb752212d758c67e7d208d62c88409189ccd2e2c28d5c570fcbdd7f019bd"} Oct 06 13:56:39 crc kubenswrapper[4757]: I1006 13:56:39.408217 4757 scope.go:117] "RemoveContainer" containerID="376c98aafdd1481ec49271aaf4e5afaed826cd382cffa7ebf5b346dddaef55eb" Oct 06 13:56:39 crc kubenswrapper[4757]: I1006 13:56:39.408271 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-647b75b6c-c9gqf" Oct 06 13:56:39 crc kubenswrapper[4757]: I1006 13:56:39.439033 4757 scope.go:117] "RemoveContainer" containerID="edc7c0c813673d3c082161a592fea917778977c75438725a6f1503c00cdec014" Oct 06 13:56:39 crc kubenswrapper[4757]: I1006 13:56:39.465355 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-ms6bn" podStartSLOduration=2.704233323 podStartE2EDuration="7.465331555s" podCreationTimestamp="2025-10-06 13:56:32 +0000 UTC" firstStartedPulling="2025-10-06 13:56:33.420522522 +0000 UTC m=+1081.917841069" lastFinishedPulling="2025-10-06 13:56:38.181620754 +0000 UTC m=+1086.678939301" observedRunningTime="2025-10-06 13:56:39.431554026 +0000 UTC m=+1087.928872573" watchObservedRunningTime="2025-10-06 13:56:39.465331555 +0000 UTC m=+1087.962650092" Oct 06 13:56:39 crc kubenswrapper[4757]: I1006 13:56:39.468065 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647b75b6c-c9gqf"] Oct 06 13:56:39 crc kubenswrapper[4757]: I1006 13:56:39.473691 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-647b75b6c-c9gqf"] Oct 06 13:56:40 crc kubenswrapper[4757]: I1006 13:56:40.191069 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cd83436-1c2f-4506-8505-c21002020f08" path="/var/lib/kubelet/pods/7cd83436-1c2f-4506-8505-c21002020f08/volumes" Oct 06 13:56:40 crc kubenswrapper[4757]: I1006 13:56:40.421825 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bswtq" event={"ID":"b257515d-a807-4a28-8b28-0e6390aa9d42","Type":"ContainerDied","Data":"0fd05490d9790b9d51fe787c6d786ad598ea8b54f12c45e654e901227b207bd1"} Oct 06 13:56:40 crc kubenswrapper[4757]: I1006 13:56:40.422184 4757 generic.go:334] "Generic (PLEG): container finished" podID="b257515d-a807-4a28-8b28-0e6390aa9d42" containerID="0fd05490d9790b9d51fe787c6d786ad598ea8b54f12c45e654e901227b207bd1" exitCode=0 Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.432267 4757 generic.go:334] "Generic (PLEG): container finished" podID="e243d6cc-b188-4dbb-a95e-21b9e6d52eeb" containerID="fe40956a831a0da9019f8af107b4834c649a88bece271363e82d0b6d1b5ddd22" exitCode=0 Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.432353 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ms6bn" event={"ID":"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb","Type":"ContainerDied","Data":"fe40956a831a0da9019f8af107b4834c649a88bece271363e82d0b6d1b5ddd22"} Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.859639 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.947592 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-config-data\") pod \"b257515d-a807-4a28-8b28-0e6390aa9d42\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.947668 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-combined-ca-bundle\") pod \"b257515d-a807-4a28-8b28-0e6390aa9d42\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.947692 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-db-sync-config-data\") pod \"b257515d-a807-4a28-8b28-0e6390aa9d42\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.947722 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8zmv\" (UniqueName: \"kubernetes.io/projected/b257515d-a807-4a28-8b28-0e6390aa9d42-kube-api-access-k8zmv\") pod \"b257515d-a807-4a28-8b28-0e6390aa9d42\" (UID: \"b257515d-a807-4a28-8b28-0e6390aa9d42\") " Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.959524 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b257515d-a807-4a28-8b28-0e6390aa9d42-kube-api-access-k8zmv" (OuterVolumeSpecName: "kube-api-access-k8zmv") pod "b257515d-a807-4a28-8b28-0e6390aa9d42" (UID: "b257515d-a807-4a28-8b28-0e6390aa9d42"). InnerVolumeSpecName "kube-api-access-k8zmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.963669 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b257515d-a807-4a28-8b28-0e6390aa9d42" (UID: "b257515d-a807-4a28-8b28-0e6390aa9d42"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.981251 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b257515d-a807-4a28-8b28-0e6390aa9d42" (UID: "b257515d-a807-4a28-8b28-0e6390aa9d42"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:41 crc kubenswrapper[4757]: I1006 13:56:41.999467 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-config-data" (OuterVolumeSpecName: "config-data") pod "b257515d-a807-4a28-8b28-0e6390aa9d42" (UID: "b257515d-a807-4a28-8b28-0e6390aa9d42"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.053376 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.053412 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.053422 4757 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b257515d-a807-4a28-8b28-0e6390aa9d42-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.053432 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8zmv\" (UniqueName: \"kubernetes.io/projected/b257515d-a807-4a28-8b28-0e6390aa9d42-kube-api-access-k8zmv\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066033 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-39b0-account-create-slfc6"] Oct 06 13:56:42 crc kubenswrapper[4757]: E1006 13:56:42.066472 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cd83436-1c2f-4506-8505-c21002020f08" containerName="dnsmasq-dns" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066500 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cd83436-1c2f-4506-8505-c21002020f08" containerName="dnsmasq-dns" Oct 06 13:56:42 crc kubenswrapper[4757]: E1006 13:56:42.066523 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b257515d-a807-4a28-8b28-0e6390aa9d42" containerName="glance-db-sync" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066537 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="b257515d-a807-4a28-8b28-0e6390aa9d42" containerName="glance-db-sync" Oct 06 13:56:42 crc kubenswrapper[4757]: E1006 13:56:42.066576 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a0bfd7-a0ed-468e-9226-ae8f99e7b457" containerName="mariadb-database-create" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066586 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a0bfd7-a0ed-468e-9226-ae8f99e7b457" containerName="mariadb-database-create" Oct 06 13:56:42 crc kubenswrapper[4757]: E1006 13:56:42.066597 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91433fac-000a-4c4c-bfc4-7f2e7c762483" containerName="mariadb-database-create" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066605 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="91433fac-000a-4c4c-bfc4-7f2e7c762483" containerName="mariadb-database-create" Oct 06 13:56:42 crc kubenswrapper[4757]: E1006 13:56:42.066617 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a8780a9-61ea-43c8-a052-2e853328cb11" containerName="mariadb-database-create" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066626 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a8780a9-61ea-43c8-a052-2e853328cb11" containerName="mariadb-database-create" Oct 06 13:56:42 crc kubenswrapper[4757]: E1006 13:56:42.066641 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cd83436-1c2f-4506-8505-c21002020f08" containerName="init" Oct 06 13:56:42 crc 
kubenswrapper[4757]: I1006 13:56:42.066648 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cd83436-1c2f-4506-8505-c21002020f08" containerName="init" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066924 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0a0bfd7-a0ed-468e-9226-ae8f99e7b457" containerName="mariadb-database-create" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066948 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a8780a9-61ea-43c8-a052-2e853328cb11" containerName="mariadb-database-create" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066958 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="91433fac-000a-4c4c-bfc4-7f2e7c762483" containerName="mariadb-database-create" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066973 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="b257515d-a807-4a28-8b28-0e6390aa9d42" containerName="glance-db-sync" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.066984 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cd83436-1c2f-4506-8505-c21002020f08" containerName="dnsmasq-dns" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.067664 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-39b0-account-create-slfc6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.069643 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.075758 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-39b0-account-create-slfc6"] Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.154594 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvp5k\" (UniqueName: \"kubernetes.io/projected/ed46f68f-a023-405d-8b70-4195b82ed6eb-kube-api-access-cvp5k\") pod \"barbican-39b0-account-create-slfc6\" (UID: \"ed46f68f-a023-405d-8b70-4195b82ed6eb\") " pod="openstack/barbican-39b0-account-create-slfc6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.154598 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-9f43-account-create-8588z"] Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.157190 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-9f43-account-create-8588z" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.158963 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.164461 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-9f43-account-create-8588z"] Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.255875 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr5qv\" (UniqueName: \"kubernetes.io/projected/22c17521-7f6c-4704-92c2-e075c6e35f5d-kube-api-access-pr5qv\") pod \"cinder-9f43-account-create-8588z\" (UID: \"22c17521-7f6c-4704-92c2-e075c6e35f5d\") " pod="openstack/cinder-9f43-account-create-8588z" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.256256 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvp5k\" (UniqueName: \"kubernetes.io/projected/ed46f68f-a023-405d-8b70-4195b82ed6eb-kube-api-access-cvp5k\") pod \"barbican-39b0-account-create-slfc6\" (UID: \"ed46f68f-a023-405d-8b70-4195b82ed6eb\") " pod="openstack/barbican-39b0-account-create-slfc6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.274439 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvp5k\" (UniqueName: \"kubernetes.io/projected/ed46f68f-a023-405d-8b70-4195b82ed6eb-kube-api-access-cvp5k\") pod \"barbican-39b0-account-create-slfc6\" (UID: \"ed46f68f-a023-405d-8b70-4195b82ed6eb\") " pod="openstack/barbican-39b0-account-create-slfc6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.357552 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr5qv\" (UniqueName: \"kubernetes.io/projected/22c17521-7f6c-4704-92c2-e075c6e35f5d-kube-api-access-pr5qv\") pod \"cinder-9f43-account-create-8588z\" (UID: \"22c17521-7f6c-4704-92c2-e075c6e35f5d\") " pod="openstack/cinder-9f43-account-create-8588z" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.378870 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr5qv\" (UniqueName: \"kubernetes.io/projected/22c17521-7f6c-4704-92c2-e075c6e35f5d-kube-api-access-pr5qv\") pod \"cinder-9f43-account-create-8588z\" (UID: \"22c17521-7f6c-4704-92c2-e075c6e35f5d\") " pod="openstack/cinder-9f43-account-create-8588z" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.400802 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-39b0-account-create-slfc6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.461036 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-bswtq" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.461162 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-bswtq" event={"ID":"b257515d-a807-4a28-8b28-0e6390aa9d42","Type":"ContainerDied","Data":"d7718981c31b8f0fcedf260f4db1b1e4a3202fc0e3bb0e6bf83ac1443cd9be4f"} Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.461194 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7718981c31b8f0fcedf260f4db1b1e4a3202fc0e3bb0e6bf83ac1443cd9be4f" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.487207 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-2bb5-account-create-8m8c6"] Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.488806 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2bb5-account-create-8m8c6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.490336 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.494727 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-9f43-account-create-8588z" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.504762 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2bb5-account-create-8m8c6"] Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.564839 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pth8q\" (UniqueName: \"kubernetes.io/projected/27a36069-f211-4f8e-9512-f9e08f252f5d-kube-api-access-pth8q\") pod \"neutron-2bb5-account-create-8m8c6\" (UID: \"27a36069-f211-4f8e-9512-f9e08f252f5d\") " pod="openstack/neutron-2bb5-account-create-8m8c6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.668296 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pth8q\" (UniqueName: \"kubernetes.io/projected/27a36069-f211-4f8e-9512-f9e08f252f5d-kube-api-access-pth8q\") pod \"neutron-2bb5-account-create-8m8c6\" (UID: \"27a36069-f211-4f8e-9512-f9e08f252f5d\") " pod="openstack/neutron-2bb5-account-create-8m8c6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.689262 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pth8q\" (UniqueName: \"kubernetes.io/projected/27a36069-f211-4f8e-9512-f9e08f252f5d-kube-api-access-pth8q\") pod \"neutron-2bb5-account-create-8m8c6\" (UID: \"27a36069-f211-4f8e-9512-f9e08f252f5d\") " pod="openstack/neutron-2bb5-account-create-8m8c6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.816307 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-2bb5-account-create-8m8c6" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.824420 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cf7447b97-crmfk"] Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.825947 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.830898 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cf7447b97-crmfk"] Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.882393 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-nb\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.882440 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-config\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.882477 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-svc\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.882504 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-swift-storage-0\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.882739 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-sb\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.882793 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd5pj\" (UniqueName: \"kubernetes.io/projected/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-kube-api-access-rd5pj\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.929891 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-39b0-account-create-slfc6"] Oct 06 13:56:42 crc kubenswrapper[4757]: W1006 13:56:42.954642 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded46f68f_a023_405d_8b70_4195b82ed6eb.slice/crio-537c4cfbfabb78d1adbeb7cba0e6401cf62031a49f956f9269cee02a66fd25f2 WatchSource:0}: Error finding container 537c4cfbfabb78d1adbeb7cba0e6401cf62031a49f956f9269cee02a66fd25f2: Status 404 returned error can't find the container with id 537c4cfbfabb78d1adbeb7cba0e6401cf62031a49f956f9269cee02a66fd25f2 Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.984806 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-nb\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.985180 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-config\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.985243 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-svc\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.985462 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-swift-storage-0\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.986555 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-config\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.986592 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-nb\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.987656 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-swift-storage-0\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.987758 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-sb\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.987801 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd5pj\" (UniqueName: \"kubernetes.io/projected/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-kube-api-access-rd5pj\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.988648 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-svc\") pod 
\"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:42 crc kubenswrapper[4757]: I1006 13:56:42.988925 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-sb\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.007461 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd5pj\" (UniqueName: \"kubernetes.io/projected/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-kube-api-access-rd5pj\") pod \"dnsmasq-dns-6cf7447b97-crmfk\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") " pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.011691 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.090399 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-config-data\") pod \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.090535 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6pnq\" (UniqueName: \"kubernetes.io/projected/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-kube-api-access-p6pnq\") pod \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.090632 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-combined-ca-bundle\") pod \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\" (UID: \"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb\") " Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.094684 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-kube-api-access-p6pnq" (OuterVolumeSpecName: "kube-api-access-p6pnq") pod "e243d6cc-b188-4dbb-a95e-21b9e6d52eeb" (UID: "e243d6cc-b188-4dbb-a95e-21b9e6d52eeb"). InnerVolumeSpecName "kube-api-access-p6pnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.144340 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-9f43-account-create-8588z"] Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.150246 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e243d6cc-b188-4dbb-a95e-21b9e6d52eeb" (UID: "e243d6cc-b188-4dbb-a95e-21b9e6d52eeb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.154536 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" Oct 06 13:56:43 crc kubenswrapper[4757]: W1006 13:56:43.155215 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22c17521_7f6c_4704_92c2_e075c6e35f5d.slice/crio-ac7db629b809832c6946fd2f849cabd27ddd1a9bb352b7ca5a2e00aa45c1cdc5 WatchSource:0}: Error finding container ac7db629b809832c6946fd2f849cabd27ddd1a9bb352b7ca5a2e00aa45c1cdc5: Status 404 returned error can't find the container with id ac7db629b809832c6946fd2f849cabd27ddd1a9bb352b7ca5a2e00aa45c1cdc5 Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.166421 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-config-data" (OuterVolumeSpecName: "config-data") pod "e243d6cc-b188-4dbb-a95e-21b9e6d52eeb" (UID: "e243d6cc-b188-4dbb-a95e-21b9e6d52eeb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.193691 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.193724 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6pnq\" (UniqueName: \"kubernetes.io/projected/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-kube-api-access-p6pnq\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.193738 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.395420 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-2bb5-account-create-8m8c6"] Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.473564 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9f43-account-create-8588z" event={"ID":"22c17521-7f6c-4704-92c2-e075c6e35f5d","Type":"ContainerStarted","Data":"ac7db629b809832c6946fd2f849cabd27ddd1a9bb352b7ca5a2e00aa45c1cdc5"} Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.480387 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2bb5-account-create-8m8c6" event={"ID":"27a36069-f211-4f8e-9512-f9e08f252f5d","Type":"ContainerStarted","Data":"2227288ce2aded2ed4ffb423684c473211e5be34b1d4be8c07cec460cfbdfc73"} Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.482539 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-ms6bn" event={"ID":"e243d6cc-b188-4dbb-a95e-21b9e6d52eeb","Type":"ContainerDied","Data":"2348c8261ee42310eae93debff80ea7266134a6c35865280afc42eec7a0793e0"} Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.482556 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-ms6bn" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.482573 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2348c8261ee42310eae93debff80ea7266134a6c35865280afc42eec7a0793e0" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.485457 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-39b0-account-create-slfc6" event={"ID":"ed46f68f-a023-405d-8b70-4195b82ed6eb","Type":"ContainerStarted","Data":"63d81f209fa475240764f493ecd86259a67b861ff2a98cd14b6c0cc2ae647646"} Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.485489 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-39b0-account-create-slfc6" event={"ID":"ed46f68f-a023-405d-8b70-4195b82ed6eb","Type":"ContainerStarted","Data":"537c4cfbfabb78d1adbeb7cba0e6401cf62031a49f956f9269cee02a66fd25f2"} Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.510376 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-39b0-account-create-slfc6" podStartSLOduration=1.510357679 podStartE2EDuration="1.510357679s" podCreationTimestamp="2025-10-06 13:56:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:43.499275607 +0000 UTC m=+1091.996594164" watchObservedRunningTime="2025-10-06 13:56:43.510357679 +0000 UTC m=+1092.007676216" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.657871 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cf7447b97-crmfk"] Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.700780 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cf7447b97-crmfk"] Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.753574 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b45d5f8d7-shsmn"] Oct 06 13:56:43 crc kubenswrapper[4757]: E1006 13:56:43.754006 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e243d6cc-b188-4dbb-a95e-21b9e6d52eeb" containerName="keystone-db-sync" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.754039 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e243d6cc-b188-4dbb-a95e-21b9e6d52eeb" containerName="keystone-db-sync" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.754246 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e243d6cc-b188-4dbb-a95e-21b9e6d52eeb" containerName="keystone-db-sync" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.755120 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.764946 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-csjmt"] Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.766014 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-csjmt" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.770939 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.771441 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-d9pz6" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.771680 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.771846 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.786398 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b45d5f8d7-shsmn"] Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.799845 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-csjmt"] Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808676 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-swift-storage-0\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808772 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-scripts\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808790 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-fernet-keys\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808812 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh7fs\" (UniqueName: \"kubernetes.io/projected/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-kube-api-access-xh7fs\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808831 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-nb\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808848 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-combined-ca-bundle\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt" Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808864 4757 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-config\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808883 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-sb\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808905 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-svc\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808924 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf9ld\" (UniqueName: \"kubernetes.io/projected/21de8b21-690d-43f2-a629-019ca257c579-kube-api-access-bf9ld\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808965 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-credential-keys\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.808986 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-config-data\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910336 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-svc\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910402 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf9ld\" (UniqueName: \"kubernetes.io/projected/21de8b21-690d-43f2-a629-019ca257c579-kube-api-access-bf9ld\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910453 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-credential-keys\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910475 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-config-data\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910497 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-swift-storage-0\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910569 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-scripts\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910595 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-fernet-keys\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910616 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh7fs\" (UniqueName: \"kubernetes.io/projected/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-kube-api-access-xh7fs\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910632 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-nb\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910651 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-combined-ca-bundle\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910667 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-config\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.910682 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-sb\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.911421 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-svc\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.911565 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-sb\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.913171 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-nb\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.913862 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-config\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.913972 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-swift-storage-0\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.926154 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-fernet-keys\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.926584 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-config-data\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.938771 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.939399 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-combined-ca-bundle\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.941490 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-credential-keys\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.941719 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.963390 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf9ld\" (UniqueName: \"kubernetes.io/projected/21de8b21-690d-43f2-a629-019ca257c579-kube-api-access-bf9ld\") pod \"dnsmasq-dns-6b45d5f8d7-shsmn\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.963614 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.968131 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 06 13:56:43 crc kubenswrapper[4757]: I1006 13:56:43.970521 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-scripts\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.017920 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh7fs\" (UniqueName: \"kubernetes.io/projected/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-kube-api-access-xh7fs\") pod \"keystone-bootstrap-csjmt\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") " pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.019514 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-run-httpd\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.019578 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-scripts\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.019615 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-config-data\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.019642 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.019663 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-log-httpd\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.019764 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.019794 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzr8l\" (UniqueName: \"kubernetes.io/projected/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-kube-api-access-qzr8l\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.036422 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.114508 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-kssfb"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.115732 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.117842 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.118128 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-bp2vn"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.118313 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.121662 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.121807 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-log-httpd\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.121895 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.121968 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzr8l\" (UniqueName: \"kubernetes.io/projected/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-kube-api-access-qzr8l\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.122086 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-run-httpd\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.124261 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-scripts\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.124381 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-config-data\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.127499 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-run-httpd\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.128442 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-log-httpd\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.128651 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-config-data\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.130823 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.131708 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-kssfb"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.136018 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-scripts\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.138377 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b45d5f8d7-shsmn"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.152028 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.160443 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzr8l\" (UniqueName: \"kubernetes.io/projected/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-kube-api-access-qzr8l\") pod \"ceilometer-0\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.179178 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59bc97769f-w5b7p"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.182353 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227032 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-nb\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227130 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-combined-ca-bundle\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227165 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxjx9\" (UniqueName: \"kubernetes.io/projected/1ced3006-a395-423b-8429-f35beb1398b0-kube-api-access-hxjx9\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227189 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ced3006-a395-423b-8429-f35beb1398b0-logs\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227242 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbm4c\" (UniqueName: \"kubernetes.io/projected/8b1f051f-58c8-4226-b9f0-1104e5a262f5-kube-api-access-wbm4c\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227271 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-config-data\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227309 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-config\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227338 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-svc\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227368 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-sb\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227397 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-scripts\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.227459 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-swift-storage-0\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.230404 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.231421 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59bc97769f-w5b7p"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.237966 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.260643 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.330990 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbm4c\" (UniqueName: \"kubernetes.io/projected/8b1f051f-58c8-4226-b9f0-1104e5a262f5-kube-api-access-wbm4c\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.331559 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-config-data\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.331934 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-config\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.333467 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-config\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.332024 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-svc\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.334070 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-sb\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.334135 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-scripts\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.334217 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-swift-storage-0\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.334216 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-svc\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.334388 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-nb\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.334437 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-combined-ca-bundle\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.334509 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxjx9\" (UniqueName: \"kubernetes.io/projected/1ced3006-a395-423b-8429-f35beb1398b0-kube-api-access-hxjx9\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.334536 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ced3006-a395-423b-8429-f35beb1398b0-logs\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.334718 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-sb\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.340269 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-swift-storage-0\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.344565 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-nb\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.347019 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-config-data\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.355554 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-scripts\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.356148 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ced3006-a395-423b-8429-f35beb1398b0-logs\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.356655 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-combined-ca-bundle\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.359980 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbm4c\" (UniqueName: \"kubernetes.io/projected/8b1f051f-58c8-4226-b9f0-1104e5a262f5-kube-api-access-wbm4c\") pod \"dnsmasq-dns-59bc97769f-w5b7p\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") " pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.362350 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxjx9\" (UniqueName: \"kubernetes.io/projected/1ced3006-a395-423b-8429-f35beb1398b0-kube-api-access-hxjx9\") pod \"placement-db-sync-kssfb\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.495727 4757 generic.go:334] "Generic (PLEG): container finished" podID="ed46f68f-a023-405d-8b70-4195b82ed6eb" containerID="63d81f209fa475240764f493ecd86259a67b861ff2a98cd14b6c0cc2ae647646" exitCode=0
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.496041 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-39b0-account-create-slfc6" event={"ID":"ed46f68f-a023-405d-8b70-4195b82ed6eb","Type":"ContainerDied","Data":"63d81f209fa475240764f493ecd86259a67b861ff2a98cd14b6c0cc2ae647646"}
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.506384 4757 generic.go:334] "Generic (PLEG): container finished" podID="22c17521-7f6c-4704-92c2-e075c6e35f5d" containerID="59cd64048238008a1b88736345245c3cfac17e11de3199977a66e0af74a3cec4" exitCode=0
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.506489 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9f43-account-create-8588z" event={"ID":"22c17521-7f6c-4704-92c2-e075c6e35f5d","Type":"ContainerDied","Data":"59cd64048238008a1b88736345245c3cfac17e11de3199977a66e0af74a3cec4"}
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.511457 4757 generic.go:334] "Generic (PLEG): container finished" podID="27a36069-f211-4f8e-9512-f9e08f252f5d" containerID="74d919e3817afeb51a780567751651163b720b8c4b693d9e053959c708eb06f2" exitCode=0
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.511546 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2bb5-account-create-8m8c6" event={"ID":"27a36069-f211-4f8e-9512-f9e08f252f5d","Type":"ContainerDied","Data":"74d919e3817afeb51a780567751651163b720b8c4b693d9e053959c708eb06f2"}
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.514104 4757 generic.go:334] "Generic (PLEG): container finished" podID="8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" containerID="4c64c6dfcfb912aebc0e4960c9695683f3e3acf24b9f3cc6231b8fa0eec7246e" exitCode=0
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.514148 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" event={"ID":"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9","Type":"ContainerDied","Data":"4c64c6dfcfb912aebc0e4960c9695683f3e3acf24b9f3cc6231b8fa0eec7246e"}
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.514213 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" event={"ID":"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9","Type":"ContainerStarted","Data":"1bceeb6f32a9e76ec4b8b249c6465a31fe9953375faaa4c428b2185b78a79b22"}
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.578456 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-kssfb"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.592874 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.731358 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-csjmt"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.841507 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b45d5f8d7-shsmn"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.854119 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.855549 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.859064 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.859216 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.859350 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-ksjg8"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.883700 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 06 13:56:44 crc kubenswrapper[4757]: W1006 13:56:44.889537 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21de8b21_690d_43f2_a629_019ca257c579.slice/crio-8aac0d4c42a696b1cb3e17b501c925d6d4883962bcc112d21b71acfd733b18c1 WatchSource:0}: Error finding container 8aac0d4c42a696b1cb3e17b501c925d6d4883962bcc112d21b71acfd733b18c1: Status 404 returned error can't find the container with id 8aac0d4c42a696b1cb3e17b501c925d6d4883962bcc112d21b71acfd733b18c1
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.934682 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.936525 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.947670 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.950612 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Oct 06 13:56:44 crc kubenswrapper[4757]: I1006 13:56:44.954771 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:56:44 crc kubenswrapper[4757]: W1006 13:56:44.967615 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe8b5ba6_8cd2_4ca4_aad4_c2bb76da00d8.slice/crio-ba168b87fe13f00fba56bd3b2154e79d489173b1616a451adbf1f5cc08595538 WatchSource:0}: Error finding container ba168b87fe13f00fba56bd3b2154e79d489173b1616a451adbf1f5cc08595538: Status 404 returned error can't find the container with id ba168b87fe13f00fba56bd3b2154e79d489173b1616a451adbf1f5cc08595538
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.000998 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cf7447b97-crmfk"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.056367 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.056485 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.056751 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-logs\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.056778 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-logs\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.056834 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-config-data\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.056852 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.058171 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w245n\" (UniqueName: \"kubernetes.io/projected/5a24c99b-42cc-41e8-b2e1-44c7850fc605-kube-api-access-w245n\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.058609 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.058714 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.058769 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.058794 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.058886 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfw68\" (UniqueName: \"kubernetes.io/projected/184e3393-41a0-494a-abe2-3948d5c57138-kube-api-access-pfw68\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.058919 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-scripts\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.059003 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.161912 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-swift-storage-0\") pod \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") "
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.161985 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-sb\") pod \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") "
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162019 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-nb\") pod \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") "
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162077 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-svc\") pod \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") "
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162130 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-config\") pod \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") "
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162195 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd5pj\" (UniqueName: \"kubernetes.io/projected/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-kube-api-access-rd5pj\") pod \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\" (UID: \"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9\") "
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162436 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162476 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162500 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162531 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-logs\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162545 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-logs\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162570 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-config-data\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162585 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162613 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w245n\" (UniqueName: \"kubernetes.io/projected/5a24c99b-42cc-41e8-b2e1-44c7850fc605-kube-api-access-w245n\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162644 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162670 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162692 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162707 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162739 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfw68\" (UniqueName: \"kubernetes.io/projected/184e3393-41a0-494a-abe2-3948d5c57138-kube-api-access-pfw68\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.162761 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-scripts\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.163452 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-logs\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.163533 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.163679 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.163952 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.165643 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-logs\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.165867 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.169318 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.175504 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.176525 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-scripts\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.179471 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-config-data\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.180518 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-kube-api-access-rd5pj" (OuterVolumeSpecName: "kube-api-access-rd5pj") pod "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" (UID: "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9"). InnerVolumeSpecName "kube-api-access-rd5pj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.183270 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.188441 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.189984 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfw68\" (UniqueName: \"kubernetes.io/projected/184e3393-41a0-494a-abe2-3948d5c57138-kube-api-access-pfw68\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.190748 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w245n\" (UniqueName: \"kubernetes.io/projected/5a24c99b-42cc-41e8-b2e1-44c7850fc605-kube-api-access-w245n\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.201413 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" (UID: "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.202050 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" (UID: "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.206121 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.210231 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" (UID: "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.215656 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") " pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.216264 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-config" (OuterVolumeSpecName: "config") pod "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" (UID: "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.228880 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" (UID: "8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.236440 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59bc97769f-w5b7p"]
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.245138 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-kssfb"]
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.265960 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd5pj\" (UniqueName: \"kubernetes.io/projected/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-kube-api-access-rd5pj\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.266001 4757 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.266013 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.266023 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.266036 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.266048 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9-config\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:45 crc kubenswrapper[4757]: W1006 13:56:45.269223 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ced3006_a395_423b_8429_f35beb1398b0.slice/crio-a9347dd93b14823e0ac46669fe95b8a0ac866c8a3247b62776c8ca625c7b18bb WatchSource:0}: Error finding container a9347dd93b14823e0ac46669fe95b8a0ac866c8a3247b62776c8ca625c7b18bb: Status 404 returned error can't find the container with id a9347dd93b14823e0ac46669fe95b8a0ac866c8a3247b62776c8ca625c7b18bb
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.300253 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.331070 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.543537 4757 generic.go:334] "Generic (PLEG): container finished" podID="21de8b21-690d-43f2-a629-019ca257c579" containerID="cb7e3a403b87cb2731ee07ccd6538f9ce544d749683e6c3c0e967b5672c19b98" exitCode=0
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.543873 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn" event={"ID":"21de8b21-690d-43f2-a629-019ca257c579","Type":"ContainerDied","Data":"cb7e3a403b87cb2731ee07ccd6538f9ce544d749683e6c3c0e967b5672c19b98"}
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.544007 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn" event={"ID":"21de8b21-690d-43f2-a629-019ca257c579","Type":"ContainerStarted","Data":"8aac0d4c42a696b1cb3e17b501c925d6d4883962bcc112d21b71acfd733b18c1"}
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.546966 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerStarted","Data":"ba168b87fe13f00fba56bd3b2154e79d489173b1616a451adbf1f5cc08595538"}
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.573570 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-csjmt" event={"ID":"c1c587f7-5a59-48ad-8876-a8ca7260d9e1","Type":"ContainerStarted","Data":"11741bf6a070a7b95f60ae214af84e63160f149a2d7a859698b0e2b05ecdfd03"}
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.573625 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-csjmt" event={"ID":"c1c587f7-5a59-48ad-8876-a8ca7260d9e1","Type":"ContainerStarted","Data":"7e64d1aec4d11221718431fe370e3ba17dcfc3228d711addefd9df16b0e61612"}
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.575696 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf7447b97-crmfk" event={"ID":"8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9","Type":"ContainerDied","Data":"1bceeb6f32a9e76ec4b8b249c6465a31fe9953375faaa4c428b2185b78a79b22"}
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.575794 4757 scope.go:117] "RemoveContainer" containerID="4c64c6dfcfb912aebc0e4960c9695683f3e3acf24b9f3cc6231b8fa0eec7246e"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.576062 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cf7447b97-crmfk"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.588858 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kssfb" event={"ID":"1ced3006-a395-423b-8429-f35beb1398b0","Type":"ContainerStarted","Data":"a9347dd93b14823e0ac46669fe95b8a0ac866c8a3247b62776c8ca625c7b18bb"}
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.593482 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-csjmt" podStartSLOduration=2.593462185 podStartE2EDuration="2.593462185s" podCreationTimestamp="2025-10-06 13:56:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:45.591628199 +0000 UTC m=+1094.088946736" watchObservedRunningTime="2025-10-06 13:56:45.593462185 +0000 UTC m=+1094.090780722"
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.613318 4757 generic.go:334] "Generic (PLEG): container finished" podID="8b1f051f-58c8-4226-b9f0-1104e5a262f5" containerID="2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3" exitCode=0
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.614240 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p" event={"ID":"8b1f051f-58c8-4226-b9f0-1104e5a262f5","Type":"ContainerDied","Data":"2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3"}
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.614272 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p" event={"ID":"8b1f051f-58c8-4226-b9f0-1104e5a262f5","Type":"ContainerStarted","Data":"d716c8b4c7b9db010bd6376091af9c50b85f614b83df080929ccbc0b3b405a6a"}
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.651190 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cf7447b97-crmfk"]
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.665671 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6cf7447b97-crmfk"]
Oct 06 13:56:45 crc kubenswrapper[4757]: I1006 13:56:45.906444 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 06 13:56:45 crc kubenswrapper[4757]: W1006 13:56:45.987196 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod184e3393_41a0_494a_abe2_3948d5c57138.slice/crio-59c905d686f9f8c872a70ef7e2b07d805d89fbebc6be1d2abd8ba063795e5d87 WatchSource:0}: Error finding container 59c905d686f9f8c872a70ef7e2b07d805d89fbebc6be1d2abd8ba063795e5d87: Status 404 returned error can't find the container with id 59c905d686f9f8c872a70ef7e2b07d805d89fbebc6be1d2abd8ba063795e5d87
Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.151741 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.232570 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" path="/var/lib/kubelet/pods/8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9/volumes" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.255548 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.314742 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-sb\") pod \"21de8b21-690d-43f2-a629-019ca257c579\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.314906 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-config\") pod \"21de8b21-690d-43f2-a629-019ca257c579\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.314940 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf9ld\" (UniqueName: \"kubernetes.io/projected/21de8b21-690d-43f2-a629-019ca257c579-kube-api-access-bf9ld\") pod \"21de8b21-690d-43f2-a629-019ca257c579\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.315040 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-swift-storage-0\") pod \"21de8b21-690d-43f2-a629-019ca257c579\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.315152 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-svc\") pod \"21de8b21-690d-43f2-a629-019ca257c579\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.315270 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-nb\") pod \"21de8b21-690d-43f2-a629-019ca257c579\" (UID: \"21de8b21-690d-43f2-a629-019ca257c579\") " Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.323415 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21de8b21-690d-43f2-a629-019ca257c579-kube-api-access-bf9ld" (OuterVolumeSpecName: "kube-api-access-bf9ld") pod "21de8b21-690d-43f2-a629-019ca257c579" (UID: "21de8b21-690d-43f2-a629-019ca257c579"). InnerVolumeSpecName "kube-api-access-bf9ld". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.333458 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-2bb5-account-create-8m8c6" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.341051 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "21de8b21-690d-43f2-a629-019ca257c579" (UID: "21de8b21-690d-43f2-a629-019ca257c579"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.379669 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "21de8b21-690d-43f2-a629-019ca257c579" (UID: "21de8b21-690d-43f2-a629-019ca257c579"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.387872 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-9f43-account-create-8588z" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.390627 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "21de8b21-690d-43f2-a629-019ca257c579" (UID: "21de8b21-690d-43f2-a629-019ca257c579"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.394586 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-config" (OuterVolumeSpecName: "config") pod "21de8b21-690d-43f2-a629-019ca257c579" (UID: "21de8b21-690d-43f2-a629-019ca257c579"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.398442 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "21de8b21-690d-43f2-a629-019ca257c579" (UID: "21de8b21-690d-43f2-a629-019ca257c579"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.403867 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-39b0-account-create-slfc6" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.420350 4757 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.420384 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.420394 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.420403 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.420412 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21de8b21-690d-43f2-a629-019ca257c579-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.420422 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf9ld\" (UniqueName: \"kubernetes.io/projected/21de8b21-690d-43f2-a629-019ca257c579-kube-api-access-bf9ld\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.521545 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvp5k\" (UniqueName: \"kubernetes.io/projected/ed46f68f-a023-405d-8b70-4195b82ed6eb-kube-api-access-cvp5k\") pod \"ed46f68f-a023-405d-8b70-4195b82ed6eb\" (UID: \"ed46f68f-a023-405d-8b70-4195b82ed6eb\") " Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.521612 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pth8q\" (UniqueName: \"kubernetes.io/projected/27a36069-f211-4f8e-9512-f9e08f252f5d-kube-api-access-pth8q\") pod \"27a36069-f211-4f8e-9512-f9e08f252f5d\" (UID: \"27a36069-f211-4f8e-9512-f9e08f252f5d\") " Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.521656 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr5qv\" (UniqueName: \"kubernetes.io/projected/22c17521-7f6c-4704-92c2-e075c6e35f5d-kube-api-access-pr5qv\") pod \"22c17521-7f6c-4704-92c2-e075c6e35f5d\" (UID: \"22c17521-7f6c-4704-92c2-e075c6e35f5d\") " Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.526976 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed46f68f-a023-405d-8b70-4195b82ed6eb-kube-api-access-cvp5k" (OuterVolumeSpecName: "kube-api-access-cvp5k") pod "ed46f68f-a023-405d-8b70-4195b82ed6eb" (UID: "ed46f68f-a023-405d-8b70-4195b82ed6eb"). InnerVolumeSpecName "kube-api-access-cvp5k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.533995 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c17521-7f6c-4704-92c2-e075c6e35f5d-kube-api-access-pr5qv" (OuterVolumeSpecName: "kube-api-access-pr5qv") pod "22c17521-7f6c-4704-92c2-e075c6e35f5d" (UID: "22c17521-7f6c-4704-92c2-e075c6e35f5d"). InnerVolumeSpecName "kube-api-access-pr5qv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.537342 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27a36069-f211-4f8e-9512-f9e08f252f5d-kube-api-access-pth8q" (OuterVolumeSpecName: "kube-api-access-pth8q") pod "27a36069-f211-4f8e-9512-f9e08f252f5d" (UID: "27a36069-f211-4f8e-9512-f9e08f252f5d"). InnerVolumeSpecName "kube-api-access-pth8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.623975 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvp5k\" (UniqueName: \"kubernetes.io/projected/ed46f68f-a023-405d-8b70-4195b82ed6eb-kube-api-access-cvp5k\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.624008 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pth8q\" (UniqueName: \"kubernetes.io/projected/27a36069-f211-4f8e-9512-f9e08f252f5d-kube-api-access-pth8q\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.624020 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr5qv\" (UniqueName: \"kubernetes.io/projected/22c17521-7f6c-4704-92c2-e075c6e35f5d-kube-api-access-pr5qv\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.629994 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p" event={"ID":"8b1f051f-58c8-4226-b9f0-1104e5a262f5","Type":"ContainerStarted","Data":"70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593"} Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.630225 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.632390 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a24c99b-42cc-41e8-b2e1-44c7850fc605","Type":"ContainerStarted","Data":"368c67c7dca8d251276f97bf785f241c346d95e125afe43cc1375386057abd24"} Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.641885 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-39b0-account-create-slfc6" event={"ID":"ed46f68f-a023-405d-8b70-4195b82ed6eb","Type":"ContainerDied","Data":"537c4cfbfabb78d1adbeb7cba0e6401cf62031a49f956f9269cee02a66fd25f2"} Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.641923 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="537c4cfbfabb78d1adbeb7cba0e6401cf62031a49f956f9269cee02a66fd25f2" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.641901 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-39b0-account-create-slfc6" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.649922 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p" podStartSLOduration=2.649904511 podStartE2EDuration="2.649904511s" podCreationTimestamp="2025-10-06 13:56:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:46.648636083 +0000 UTC m=+1095.145954640" watchObservedRunningTime="2025-10-06 13:56:46.649904511 +0000 UTC m=+1095.147223058" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.651879 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn" event={"ID":"21de8b21-690d-43f2-a629-019ca257c579","Type":"ContainerDied","Data":"8aac0d4c42a696b1cb3e17b501c925d6d4883962bcc112d21b71acfd733b18c1"} Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.651932 4757 scope.go:117] "RemoveContainer" containerID="cb7e3a403b87cb2731ee07ccd6538f9ce544d749683e6c3c0e967b5672c19b98" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.652019 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b45d5f8d7-shsmn" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.655164 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9f43-account-create-8588z" event={"ID":"22c17521-7f6c-4704-92c2-e075c6e35f5d","Type":"ContainerDied","Data":"ac7db629b809832c6946fd2f849cabd27ddd1a9bb352b7ca5a2e00aa45c1cdc5"} Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.655214 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac7db629b809832c6946fd2f849cabd27ddd1a9bb352b7ca5a2e00aa45c1cdc5" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.655278 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-9f43-account-create-8588z" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.660920 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"184e3393-41a0-494a-abe2-3948d5c57138","Type":"ContainerStarted","Data":"59c905d686f9f8c872a70ef7e2b07d805d89fbebc6be1d2abd8ba063795e5d87"} Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.670380 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-2bb5-account-create-8m8c6" event={"ID":"27a36069-f211-4f8e-9512-f9e08f252f5d","Type":"ContainerDied","Data":"2227288ce2aded2ed4ffb423684c473211e5be34b1d4be8c07cec460cfbdfc73"} Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.670420 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2227288ce2aded2ed4ffb423684c473211e5be34b1d4be8c07cec460cfbdfc73" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.670473 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-2bb5-account-create-8m8c6" Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.746612 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b45d5f8d7-shsmn"] Oct 06 13:56:46 crc kubenswrapper[4757]: I1006 13:56:46.758113 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b45d5f8d7-shsmn"] Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.466978 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-6znnt"] Oct 06 13:56:47 crc kubenswrapper[4757]: E1006 13:56:47.470138 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27a36069-f211-4f8e-9512-f9e08f252f5d" containerName="mariadb-account-create" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470165 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="27a36069-f211-4f8e-9512-f9e08f252f5d" containerName="mariadb-account-create" Oct 06 13:56:47 crc kubenswrapper[4757]: E1006 13:56:47.470550 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22c17521-7f6c-4704-92c2-e075c6e35f5d" containerName="mariadb-account-create" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470571 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="22c17521-7f6c-4704-92c2-e075c6e35f5d" containerName="mariadb-account-create" Oct 06 13:56:47 crc kubenswrapper[4757]: E1006 13:56:47.470590 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed46f68f-a023-405d-8b70-4195b82ed6eb" containerName="mariadb-account-create" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470599 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed46f68f-a023-405d-8b70-4195b82ed6eb" containerName="mariadb-account-create" Oct 06 13:56:47 crc kubenswrapper[4757]: E1006 13:56:47.470618 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" containerName="init" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470623 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" containerName="init" Oct 06 13:56:47 crc kubenswrapper[4757]: E1006 13:56:47.470638 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21de8b21-690d-43f2-a629-019ca257c579" containerName="init" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470644 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="21de8b21-690d-43f2-a629-019ca257c579" containerName="init" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470819 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed46f68f-a023-405d-8b70-4195b82ed6eb" containerName="mariadb-account-create" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470842 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dd7baea-3673-4c7c-be1f-80e4f3ba2ad9" containerName="init" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470861 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="27a36069-f211-4f8e-9512-f9e08f252f5d" containerName="mariadb-account-create" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470884 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="21de8b21-690d-43f2-a629-019ca257c579" containerName="init" Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.470893 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="22c17521-7f6c-4704-92c2-e075c6e35f5d" containerName="mariadb-account-create" 
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.487804 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-6znnt"]
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.487968 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.499502 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-kms2q"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.502993 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.641635 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.652026 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-combined-ca-bundle\") pod \"barbican-db-sync-6znnt\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.652087 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm2jp\" (UniqueName: \"kubernetes.io/projected/3bf60902-663f-4bbc-8415-691e6519a557-kube-api-access-cm2jp\") pod \"barbican-db-sync-6znnt\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.652250 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-db-sync-config-data\") pod \"barbican-db-sync-6znnt\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.691580 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.695299 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a24c99b-42cc-41e8-b2e1-44c7850fc605","Type":"ContainerStarted","Data":"e7c53372910df76350160efb63b34a0963eb3e04743c2687084d8b7098e339a2"}
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.699035 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"184e3393-41a0-494a-abe2-3948d5c57138","Type":"ContainerStarted","Data":"9403e501ddf88e3a89e5562d038ad8cd740dd06a7ab340ca00c34ba728e37472"}
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.724496 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.755043 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-db-sync-config-data\") pod \"barbican-db-sync-6znnt\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.755156 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-combined-ca-bundle\") pod \"barbican-db-sync-6znnt\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.755201 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm2jp\" (UniqueName: \"kubernetes.io/projected/3bf60902-663f-4bbc-8415-691e6519a557-kube-api-access-cm2jp\") pod \"barbican-db-sync-6znnt\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.763420 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-db-sync-config-data\") pod \"barbican-db-sync-6znnt\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.767375 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-combined-ca-bundle\") pod \"barbican-db-sync-6znnt\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.775193 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-kjpw4"]
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.776787 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.780454 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.780585 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.780733 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-4x8qm"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.782274 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm2jp\" (UniqueName: \"kubernetes.io/projected/3bf60902-663f-4bbc-8415-691e6519a557-kube-api-access-cm2jp\") pod \"barbican-db-sync-6znnt\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.794892 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kjpw4"]
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.840922 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-6znnt"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.957382 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-combined-ca-bundle\") pod \"neutron-db-sync-kjpw4\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.957616 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hscps\" (UniqueName: \"kubernetes.io/projected/434206fd-9f64-4c9d-a528-55d9361dad92-kube-api-access-hscps\") pod \"neutron-db-sync-kjpw4\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:47 crc kubenswrapper[4757]: I1006 13:56:47.957645 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-config\") pod \"neutron-db-sync-kjpw4\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.059108 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-combined-ca-bundle\") pod \"neutron-db-sync-kjpw4\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.059193 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hscps\" (UniqueName: \"kubernetes.io/projected/434206fd-9f64-4c9d-a528-55d9361dad92-kube-api-access-hscps\") pod \"neutron-db-sync-kjpw4\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.059224 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-config\") pod \"neutron-db-sync-kjpw4\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.065881 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-combined-ca-bundle\") pod \"neutron-db-sync-kjpw4\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.079755 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hscps\" (UniqueName: \"kubernetes.io/projected/434206fd-9f64-4c9d-a528-55d9361dad92-kube-api-access-hscps\") pod \"neutron-db-sync-kjpw4\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.080402 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-config\") pod \"neutron-db-sync-kjpw4\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.205489 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21de8b21-690d-43f2-a629-019ca257c579" path="/var/lib/kubelet/pods/21de8b21-690d-43f2-a629-019ca257c579/volumes"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.331357 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kjpw4"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.404708 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-6znnt"]
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.710370 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a24c99b-42cc-41e8-b2e1-44c7850fc605","Type":"ContainerStarted","Data":"f1b9fd6207c6fd6730fccb68ac42754472a68b0e123aa475b7efd7aba50320e7"}
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.710942 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerName="glance-log" containerID="cri-o://e7c53372910df76350160efb63b34a0963eb3e04743c2687084d8b7098e339a2" gracePeriod=30
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.711784 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerName="glance-httpd" containerID="cri-o://f1b9fd6207c6fd6730fccb68ac42754472a68b0e123aa475b7efd7aba50320e7" gracePeriod=30
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.718740 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"184e3393-41a0-494a-abe2-3948d5c57138","Type":"ContainerStarted","Data":"482f455e70bb5f456dffdecbaf338406ea097cdc96fc447e420691d38c0c9841"}
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.718850 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="184e3393-41a0-494a-abe2-3948d5c57138" containerName="glance-log" containerID="cri-o://9403e501ddf88e3a89e5562d038ad8cd740dd06a7ab340ca00c34ba728e37472" gracePeriod=30
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.718967 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="184e3393-41a0-494a-abe2-3948d5c57138" containerName="glance-httpd" containerID="cri-o://482f455e70bb5f456dffdecbaf338406ea097cdc96fc447e420691d38c0c9841" gracePeriod=30
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.737289 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.735072121 podStartE2EDuration="5.735072121s" podCreationTimestamp="2025-10-06 13:56:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:48.733261916 +0000 UTC m=+1097.230580453" watchObservedRunningTime="2025-10-06 13:56:48.735072121 +0000 UTC m=+1097.232390658"
Oct 06 13:56:48 crc kubenswrapper[4757]: I1006 13:56:48.768988 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.768973185 podStartE2EDuration="5.768973185s" podCreationTimestamp="2025-10-06 13:56:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:48.760638058 +0000 UTC m=+1097.257956615" watchObservedRunningTime="2025-10-06 13:56:48.768973185 +0000 UTC m=+1097.266291722"
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.729932 4757 generic.go:334] "Generic (PLEG): container finished" podID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerID="f1b9fd6207c6fd6730fccb68ac42754472a68b0e123aa475b7efd7aba50320e7" exitCode=0
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.729961 4757 generic.go:334] "Generic (PLEG): container finished" podID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerID="e7c53372910df76350160efb63b34a0963eb3e04743c2687084d8b7098e339a2" exitCode=143
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.730007 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a24c99b-42cc-41e8-b2e1-44c7850fc605","Type":"ContainerDied","Data":"f1b9fd6207c6fd6730fccb68ac42754472a68b0e123aa475b7efd7aba50320e7"}
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.730054 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a24c99b-42cc-41e8-b2e1-44c7850fc605","Type":"ContainerDied","Data":"e7c53372910df76350160efb63b34a0963eb3e04743c2687084d8b7098e339a2"}
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.731958 4757 generic.go:334] "Generic (PLEG): container finished" podID="184e3393-41a0-494a-abe2-3948d5c57138" containerID="482f455e70bb5f456dffdecbaf338406ea097cdc96fc447e420691d38c0c9841" exitCode=0
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.731979 4757 generic.go:334] "Generic (PLEG): container finished" podID="184e3393-41a0-494a-abe2-3948d5c57138" containerID="9403e501ddf88e3a89e5562d038ad8cd740dd06a7ab340ca00c34ba728e37472" exitCode=143
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.732000 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"184e3393-41a0-494a-abe2-3948d5c57138","Type":"ContainerDied","Data":"482f455e70bb5f456dffdecbaf338406ea097cdc96fc447e420691d38c0c9841"}
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.732046 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"184e3393-41a0-494a-abe2-3948d5c57138","Type":"ContainerDied","Data":"9403e501ddf88e3a89e5562d038ad8cd740dd06a7ab340ca00c34ba728e37472"}
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.733350 4757 generic.go:334] "Generic (PLEG): container finished" podID="c1c587f7-5a59-48ad-8876-a8ca7260d9e1" containerID="11741bf6a070a7b95f60ae214af84e63160f149a2d7a859698b0e2b05ecdfd03" exitCode=0
Oct 06 13:56:49 crc kubenswrapper[4757]: I1006 13:56:49.733372 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-csjmt" event={"ID":"c1c587f7-5a59-48ad-8876-a8ca7260d9e1","Type":"ContainerDied","Data":"11741bf6a070a7b95f60ae214af84e63160f149a2d7a859698b0e2b05ecdfd03"}
Oct 06 13:56:51 crc kubenswrapper[4757]: W1006 13:56:51.828741 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3bf60902_663f_4bbc_8415_691e6519a557.slice/crio-2c169df601c4a575e6fc745352a6a4857c4c0a067b07d09f5708ffaa4432af75 WatchSource:0}: Error finding container 2c169df601c4a575e6fc745352a6a4857c4c0a067b07d09f5708ffaa4432af75: Status 404 returned error can't find the container with id 2c169df601c4a575e6fc745352a6a4857c4c0a067b07d09f5708ffaa4432af75
Oct 06 13:56:51 crc kubenswrapper[4757]: I1006 13:56:51.937546 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-csjmt"
Oct 06 13:56:51 crc kubenswrapper[4757]: I1006 13:56:51.945799 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032048 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-combined-ca-bundle\") pod \"184e3393-41a0-494a-abe2-3948d5c57138\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032136 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-scripts\") pod \"184e3393-41a0-494a-abe2-3948d5c57138\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032215 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-logs\") pod \"184e3393-41a0-494a-abe2-3948d5c57138\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032278 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfw68\" (UniqueName: \"kubernetes.io/projected/184e3393-41a0-494a-abe2-3948d5c57138-kube-api-access-pfw68\") pod \"184e3393-41a0-494a-abe2-3948d5c57138\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032305 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"184e3393-41a0-494a-abe2-3948d5c57138\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032379 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-credential-keys\") pod \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032402 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-config-data\") pod \"184e3393-41a0-494a-abe2-3948d5c57138\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032457 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-combined-ca-bundle\") pod \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032486 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-fernet-keys\") pod \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032531 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xh7fs\" (UniqueName: \"kubernetes.io/projected/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-kube-api-access-xh7fs\") pod \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032561 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-httpd-run\") pod \"184e3393-41a0-494a-abe2-3948d5c57138\" (UID: \"184e3393-41a0-494a-abe2-3948d5c57138\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032633 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-config-data\") pod \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.032690 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-scripts\") pod \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\" (UID: \"c1c587f7-5a59-48ad-8876-a8ca7260d9e1\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.034513 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "184e3393-41a0-494a-abe2-3948d5c57138" (UID: "184e3393-41a0-494a-abe2-3948d5c57138"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.035205 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-logs" (OuterVolumeSpecName: "logs") pod "184e3393-41a0-494a-abe2-3948d5c57138" (UID: "184e3393-41a0-494a-abe2-3948d5c57138"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.038390 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-scripts" (OuterVolumeSpecName: "scripts") pod "184e3393-41a0-494a-abe2-3948d5c57138" (UID: "184e3393-41a0-494a-abe2-3948d5c57138"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.039577 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/184e3393-41a0-494a-abe2-3948d5c57138-kube-api-access-pfw68" (OuterVolumeSpecName: "kube-api-access-pfw68") pod "184e3393-41a0-494a-abe2-3948d5c57138" (UID: "184e3393-41a0-494a-abe2-3948d5c57138"). InnerVolumeSpecName "kube-api-access-pfw68". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.039686 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c1c587f7-5a59-48ad-8876-a8ca7260d9e1" (UID: "c1c587f7-5a59-48ad-8876-a8ca7260d9e1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.041555 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "184e3393-41a0-494a-abe2-3948d5c57138" (UID: "184e3393-41a0-494a-abe2-3948d5c57138"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.043873 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-kube-api-access-xh7fs" (OuterVolumeSpecName: "kube-api-access-xh7fs") pod "c1c587f7-5a59-48ad-8876-a8ca7260d9e1" (UID: "c1c587f7-5a59-48ad-8876-a8ca7260d9e1"). InnerVolumeSpecName "kube-api-access-xh7fs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.044392 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c1c587f7-5a59-48ad-8876-a8ca7260d9e1" (UID: "c1c587f7-5a59-48ad-8876-a8ca7260d9e1"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.047545 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-scripts" (OuterVolumeSpecName: "scripts") pod "c1c587f7-5a59-48ad-8876-a8ca7260d9e1" (UID: "c1c587f7-5a59-48ad-8876-a8ca7260d9e1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.082942 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c1c587f7-5a59-48ad-8876-a8ca7260d9e1" (UID: "c1c587f7-5a59-48ad-8876-a8ca7260d9e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.083638 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "184e3393-41a0-494a-abe2-3948d5c57138" (UID: "184e3393-41a0-494a-abe2-3948d5c57138"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.092497 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-config-data" (OuterVolumeSpecName: "config-data") pod "c1c587f7-5a59-48ad-8876-a8ca7260d9e1" (UID: "c1c587f7-5a59-48ad-8876-a8ca7260d9e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.105138 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-config-data" (OuterVolumeSpecName: "config-data") pod "184e3393-41a0-494a-abe2-3948d5c57138" (UID: "184e3393-41a0-494a-abe2-3948d5c57138"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135116 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-logs\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135160 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfw68\" (UniqueName: \"kubernetes.io/projected/184e3393-41a0-494a-abe2-3948d5c57138-kube-api-access-pfw68\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135194 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135206 4757 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-credential-keys\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135217 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-config-data\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135225 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135233 4757 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135241 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xh7fs\" (UniqueName: \"kubernetes.io/projected/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-kube-api-access-xh7fs\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135249 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/184e3393-41a0-494a-abe2-3948d5c57138-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135257 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-config-data\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135265 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1c587f7-5a59-48ad-8876-a8ca7260d9e1-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135273 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.135281 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/184e3393-41a0-494a-abe2-3948d5c57138-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.162674 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.236393 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.493731 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.504463 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-4nqxd"]
Oct 06 13:56:52 crc kubenswrapper[4757]: E1006 13:56:52.505394 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1c587f7-5a59-48ad-8876-a8ca7260d9e1" containerName="keystone-bootstrap"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.505420 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1c587f7-5a59-48ad-8876-a8ca7260d9e1" containerName="keystone-bootstrap"
Oct 06 13:56:52 crc kubenswrapper[4757]: E1006 13:56:52.505459 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerName="glance-httpd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.505468 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerName="glance-httpd"
Oct 06 13:56:52 crc kubenswrapper[4757]: E1006 13:56:52.505484 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="184e3393-41a0-494a-abe2-3948d5c57138" containerName="glance-log"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.505491 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="184e3393-41a0-494a-abe2-3948d5c57138" containerName="glance-log"
Oct 06 13:56:52 crc kubenswrapper[4757]: E1006 13:56:52.505521 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="184e3393-41a0-494a-abe2-3948d5c57138" containerName="glance-httpd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.505529 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="184e3393-41a0-494a-abe2-3948d5c57138" containerName="glance-httpd"
Oct 06 13:56:52 crc kubenswrapper[4757]: E1006 13:56:52.505548 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerName="glance-log"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.505556 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerName="glance-log"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.506004 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="184e3393-41a0-494a-abe2-3948d5c57138" containerName="glance-httpd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.506038 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerName="glance-httpd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.506061 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1c587f7-5a59-48ad-8876-a8ca7260d9e1" containerName="keystone-bootstrap"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.506076 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" containerName="glance-log"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.506112 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="184e3393-41a0-494a-abe2-3948d5c57138" containerName="glance-log"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.506911 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-4nqxd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.509385 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.509466 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-w5d5j"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.512211 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.535581 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-4nqxd"]
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.652654 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.653247 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-httpd-run\") pod \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.653315 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-combined-ca-bundle\") pod \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.653346 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-scripts\") pod \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.653378 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w245n\" (UniqueName: \"kubernetes.io/projected/5a24c99b-42cc-41e8-b2e1-44c7850fc605-kube-api-access-w245n\") pod \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.653431 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-config-data\") pod \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.653484 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-logs\") pod \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\" (UID: \"5a24c99b-42cc-41e8-b2e1-44c7850fc605\") "
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.653701 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-config-data\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.653729 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f705f558-519e-489e-8ff4-5b3eb4476eff-etc-machine-id\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.653794 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-db-sync-config-data\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.654024 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-scripts\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.654063 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fj4s7\" (UniqueName: \"kubernetes.io/projected/f705f558-519e-489e-8ff4-5b3eb4476eff-kube-api-access-fj4s7\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.654088 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-combined-ca-bundle\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd"
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.656834 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-logs" (OuterVolumeSpecName: "logs") pod "5a24c99b-42cc-41e8-b2e1-44c7850fc605" (UID: "5a24c99b-42cc-41e8-b2e1-44c7850fc605"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.660171 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5a24c99b-42cc-41e8-b2e1-44c7850fc605" (UID: "5a24c99b-42cc-41e8-b2e1-44c7850fc605"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.660944 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "5a24c99b-42cc-41e8-b2e1-44c7850fc605" (UID: "5a24c99b-42cc-41e8-b2e1-44c7850fc605"). InnerVolumeSpecName "local-storage02-crc".
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.668995 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a24c99b-42cc-41e8-b2e1-44c7850fc605-kube-api-access-w245n" (OuterVolumeSpecName: "kube-api-access-w245n") pod "5a24c99b-42cc-41e8-b2e1-44c7850fc605" (UID: "5a24c99b-42cc-41e8-b2e1-44c7850fc605"). InnerVolumeSpecName "kube-api-access-w245n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.669632 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-scripts" (OuterVolumeSpecName: "scripts") pod "5a24c99b-42cc-41e8-b2e1-44c7850fc605" (UID: "5a24c99b-42cc-41e8-b2e1-44c7850fc605"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.694138 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a24c99b-42cc-41e8-b2e1-44c7850fc605" (UID: "5a24c99b-42cc-41e8-b2e1-44c7850fc605"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.755239 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-db-sync-config-data\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.755680 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-scripts\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.755723 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fj4s7\" (UniqueName: \"kubernetes.io/projected/f705f558-519e-489e-8ff4-5b3eb4476eff-kube-api-access-fj4s7\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.755748 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-combined-ca-bundle\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.755812 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-config-data\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.755847 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f705f558-519e-489e-8ff4-5b3eb4476eff-etc-machine-id\") pod \"cinder-db-sync-4nqxd\" (UID: 
\"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.755948 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.755966 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w245n\" (UniqueName: \"kubernetes.io/projected/5a24c99b-42cc-41e8-b2e1-44c7850fc605-kube-api-access-w245n\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.755979 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.756002 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.756014 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5a24c99b-42cc-41e8-b2e1-44c7850fc605-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.756025 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.756655 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f705f558-519e-489e-8ff4-5b3eb4476eff-etc-machine-id\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.762754 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-db-sync-config-data\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.764870 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-scripts\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.765550 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-config-data\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.765960 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-combined-ca-bundle\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.766251 4757 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-config-data" (OuterVolumeSpecName: "config-data") pod "5a24c99b-42cc-41e8-b2e1-44c7850fc605" (UID: "5a24c99b-42cc-41e8-b2e1-44c7850fc605"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.775328 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fj4s7\" (UniqueName: \"kubernetes.io/projected/f705f558-519e-489e-8ff4-5b3eb4476eff-kube-api-access-fj4s7\") pod \"cinder-db-sync-4nqxd\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") " pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.777279 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kssfb" event={"ID":"1ced3006-a395-423b-8429-f35beb1398b0","Type":"ContainerStarted","Data":"261f6b4aa7ccd2c620651f90574d988ef7b35bce2e0e2eb1b3acfa5a31d3ae4a"} Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.780765 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6znnt" event={"ID":"3bf60902-663f-4bbc-8415-691e6519a557","Type":"ContainerStarted","Data":"2c169df601c4a575e6fc745352a6a4857c4c0a067b07d09f5708ffaa4432af75"} Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.783494 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"5a24c99b-42cc-41e8-b2e1-44c7850fc605","Type":"ContainerDied","Data":"368c67c7dca8d251276f97bf785f241c346d95e125afe43cc1375386057abd24"} Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.783509 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.783536 4757 scope.go:117] "RemoveContainer" containerID="f1b9fd6207c6fd6730fccb68ac42754472a68b0e123aa475b7efd7aba50320e7" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.793528 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"184e3393-41a0-494a-abe2-3948d5c57138","Type":"ContainerDied","Data":"59c905d686f9f8c872a70ef7e2b07d805d89fbebc6be1d2abd8ba063795e5d87"} Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.793696 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.799403 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerStarted","Data":"6e2deb40869d8e3b337a246d8a53afdfb67a72beae0813b065f38ab3aadcb0db"} Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.804167 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-kssfb" podStartSLOduration=1.591536686 podStartE2EDuration="8.804150564s" podCreationTimestamp="2025-10-06 13:56:44 +0000 UTC" firstStartedPulling="2025-10-06 13:56:45.282236156 +0000 UTC m=+1093.779554693" lastFinishedPulling="2025-10-06 13:56:52.494850034 +0000 UTC m=+1100.992168571" observedRunningTime="2025-10-06 13:56:52.796040085 +0000 UTC m=+1101.293358622" watchObservedRunningTime="2025-10-06 13:56:52.804150564 +0000 UTC m=+1101.301469101" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.808729 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-csjmt" event={"ID":"c1c587f7-5a59-48ad-8876-a8ca7260d9e1","Type":"ContainerDied","Data":"7e64d1aec4d11221718431fe370e3ba17dcfc3228d711addefd9df16b0e61612"} Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.808767 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e64d1aec4d11221718431fe370e3ba17dcfc3228d711addefd9df16b0e61612" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.808826 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-csjmt" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.817610 4757 scope.go:117] "RemoveContainer" containerID="e7c53372910df76350160efb63b34a0963eb3e04743c2687084d8b7098e339a2" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.825410 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.839443 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.858358 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.858395 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a24c99b-42cc-41e8-b2e1-44c7850fc605-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:52 crc kubenswrapper[4757]: I1006 13:56:52.975148 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:52.994987 4757 scope.go:117] "RemoveContainer" containerID="482f455e70bb5f456dffdecbaf338406ea097cdc96fc447e420691d38c0c9841" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:52.995164 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.009208 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.025164 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.031506 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.033247 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.038226 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.038492 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-ksjg8" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.038528 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.038673 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.074999 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kjpw4"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.096370 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.098018 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.102866 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.116756 4757 scope.go:117] "RemoveContainer" containerID="9403e501ddf88e3a89e5562d038ad8cd740dd06a7ab340ca00c34ba728e37472" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.123158 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.138164 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.182437 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.182506 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.182548 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-scripts\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.182572 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdxzf\" (UniqueName: \"kubernetes.io/projected/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-kube-api-access-vdxzf\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.182594 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.182620 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-logs\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.182640 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " 
pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.182694 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-config-data\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.198053 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.205798 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-csjmt"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.216360 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-csjmt"] Oct 06 13:56:53 crc kubenswrapper[4757]: E1006 13:56:53.279137 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data glance httpd-run kube-api-access-hz86l logs scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/glance-default-internal-api-0" podUID="554a44d5-55f5-48d0-83d2-e82015b807e8" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.285558 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.285630 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.285695 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.285760 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.285782 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.285885 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.285931 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdxzf\" (UniqueName: \"kubernetes.io/projected/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-kube-api-access-vdxzf\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.286760 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.286816 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-logs\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.286856 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.286966 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz86l\" (UniqueName: \"kubernetes.io/projected/554a44d5-55f5-48d0-83d2-e82015b807e8-kube-api-access-hz86l\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.286994 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.287003 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.287407 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-config-data\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.287455 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.287486 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-logs\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.288340 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-logs\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.287979 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.295784 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.297315 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-scripts\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.299695 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-config-data\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.302705 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.306596 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdxzf\" (UniqueName: \"kubernetes.io/projected/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-kube-api-access-vdxzf\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.315852 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc 
kubenswrapper[4757]: I1006 13:56:53.316108 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-v77kw"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.317294 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.320197 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-d9pz6" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.320520 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.320605 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.320719 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.332071 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-v77kw"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.369067 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.391527 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz86l\" (UniqueName: \"kubernetes.io/projected/554a44d5-55f5-48d0-83d2-e82015b807e8-kube-api-access-hz86l\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.391620 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.391699 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.391737 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-logs\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.391806 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.391871 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.391989 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.392398 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.392944 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-logs\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.393308 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.402220 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.402646 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.408559 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.415812 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz86l\" (UniqueName: \"kubernetes.io/projected/554a44d5-55f5-48d0-83d2-e82015b807e8-kube-api-access-hz86l\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.513781 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9bt6\" (UniqueName: \"kubernetes.io/projected/76cdc407-bb9a-44dc-82eb-c9b083395c8d-kube-api-access-s9bt6\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 
crc kubenswrapper[4757]: I1006 13:56:53.514078 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-fernet-keys\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.514444 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-scripts\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.514513 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-combined-ca-bundle\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.514544 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-config-data\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.514697 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-credential-keys\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.517372 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.615999 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-combined-ca-bundle\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.616038 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-config-data\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.616167 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-credential-keys\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.616236 4757 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-s9bt6\" (UniqueName: \"kubernetes.io/projected/76cdc407-bb9a-44dc-82eb-c9b083395c8d-kube-api-access-s9bt6\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.616258 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-fernet-keys\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.616287 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-scripts\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.624961 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-credential-keys\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.625275 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-scripts\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.625861 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-fernet-keys\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.626414 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-combined-ca-bundle\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.626912 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-config-data\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.632156 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9bt6\" (UniqueName: \"kubernetes.io/projected/76cdc407-bb9a-44dc-82eb-c9b083395c8d-kube-api-access-s9bt6\") pod \"keystone-bootstrap-v77kw\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.659063 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.663865 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-4nqxd"] Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.847811 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kjpw4" event={"ID":"434206fd-9f64-4c9d-a528-55d9361dad92","Type":"ContainerStarted","Data":"aa3678cfeaf79fb26c4c0f487ec077377631d000fb2a222d822e9b0d65cb8401"} Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.847870 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kjpw4" event={"ID":"434206fd-9f64-4c9d-a528-55d9361dad92","Type":"ContainerStarted","Data":"93a97935600a742251eff99e1f8f114cc37502cd986c26b545d6fae8e34bac58"} Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.872814 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4nqxd" event={"ID":"f705f558-519e-489e-8ff4-5b3eb4476eff","Type":"ContainerStarted","Data":"5ebc87f9d0dd75bcaf02f76704f012782a0b2d7f879978c06b4b68ec0893c6f6"} Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.875759 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.892776 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-kjpw4" podStartSLOduration=6.8927090159999995 podStartE2EDuration="6.892709016s" podCreationTimestamp="2025-10-06 13:56:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:53.88731564 +0000 UTC m=+1102.384634177" watchObservedRunningTime="2025-10-06 13:56:53.892709016 +0000 UTC m=+1102.390027573" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.904555 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 13:56:53 crc kubenswrapper[4757]: I1006 13:56:53.934386 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.040007 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-config-data\") pod \"554a44d5-55f5-48d0-83d2-e82015b807e8\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.040149 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-combined-ca-bundle\") pod \"554a44d5-55f5-48d0-83d2-e82015b807e8\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.040213 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-logs\") pod \"554a44d5-55f5-48d0-83d2-e82015b807e8\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.040258 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"554a44d5-55f5-48d0-83d2-e82015b807e8\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.040319 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-httpd-run\") pod \"554a44d5-55f5-48d0-83d2-e82015b807e8\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.040350 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz86l\" (UniqueName: \"kubernetes.io/projected/554a44d5-55f5-48d0-83d2-e82015b807e8-kube-api-access-hz86l\") pod \"554a44d5-55f5-48d0-83d2-e82015b807e8\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.040374 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-scripts\") pod \"554a44d5-55f5-48d0-83d2-e82015b807e8\" (UID: \"554a44d5-55f5-48d0-83d2-e82015b807e8\") " Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.043167 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-logs" (OuterVolumeSpecName: "logs") pod "554a44d5-55f5-48d0-83d2-e82015b807e8" (UID: "554a44d5-55f5-48d0-83d2-e82015b807e8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.043502 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "554a44d5-55f5-48d0-83d2-e82015b807e8" (UID: "554a44d5-55f5-48d0-83d2-e82015b807e8"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.063716 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-config-data" (OuterVolumeSpecName: "config-data") pod "554a44d5-55f5-48d0-83d2-e82015b807e8" (UID: "554a44d5-55f5-48d0-83d2-e82015b807e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.064304 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-scripts" (OuterVolumeSpecName: "scripts") pod "554a44d5-55f5-48d0-83d2-e82015b807e8" (UID: "554a44d5-55f5-48d0-83d2-e82015b807e8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.065329 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/554a44d5-55f5-48d0-83d2-e82015b807e8-kube-api-access-hz86l" (OuterVolumeSpecName: "kube-api-access-hz86l") pod "554a44d5-55f5-48d0-83d2-e82015b807e8" (UID: "554a44d5-55f5-48d0-83d2-e82015b807e8"). InnerVolumeSpecName "kube-api-access-hz86l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.065451 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "554a44d5-55f5-48d0-83d2-e82015b807e8" (UID: "554a44d5-55f5-48d0-83d2-e82015b807e8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.065473 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "554a44d5-55f5-48d0-83d2-e82015b807e8" (UID: "554a44d5-55f5-48d0-83d2-e82015b807e8"). InnerVolumeSpecName "local-storage02-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.143111 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.143157 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.143200 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.143212 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/554a44d5-55f5-48d0-83d2-e82015b807e8-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.143223 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz86l\" (UniqueName: \"kubernetes.io/projected/554a44d5-55f5-48d0-83d2-e82015b807e8-kube-api-access-hz86l\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.143236 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.143246 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/554a44d5-55f5-48d0-83d2-e82015b807e8-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.176522 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.201632 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="184e3393-41a0-494a-abe2-3948d5c57138" path="/var/lib/kubelet/pods/184e3393-41a0-494a-abe2-3948d5c57138/volumes" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.202462 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a24c99b-42cc-41e8-b2e1-44c7850fc605" path="/var/lib/kubelet/pods/5a24c99b-42cc-41e8-b2e1-44c7850fc605/volumes" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.203049 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1c587f7-5a59-48ad-8876-a8ca7260d9e1" path="/var/lib/kubelet/pods/c1c587f7-5a59-48ad-8876-a8ca7260d9e1/volumes" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.224767 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-v77kw"] Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.244741 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.595326 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.665746 4757 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/dnsmasq-dns-7fb487d997-ctdfh"] Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.665979 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" podUID="71d18dba-d344-4779-9b18-cecbe20b22eb" containerName="dnsmasq-dns" containerID="cri-o://5606e8efd9a2437fd5572db5e168149262d1347c44f3fb369868f75a0a20065a" gracePeriod=10 Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.889966 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v77kw" event={"ID":"76cdc407-bb9a-44dc-82eb-c9b083395c8d","Type":"ContainerStarted","Data":"ef5fc8e1220fa912fb9cb4914f707483933fe1776f2a4225976e2ab361b1ce5a"} Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.893644 4757 generic.go:334] "Generic (PLEG): container finished" podID="71d18dba-d344-4779-9b18-cecbe20b22eb" containerID="5606e8efd9a2437fd5572db5e168149262d1347c44f3fb369868f75a0a20065a" exitCode=0 Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.893713 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" event={"ID":"71d18dba-d344-4779-9b18-cecbe20b22eb","Type":"ContainerDied","Data":"5606e8efd9a2437fd5572db5e168149262d1347c44f3fb369868f75a0a20065a"} Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.901105 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162","Type":"ContainerStarted","Data":"a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b"} Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.901151 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162","Type":"ContainerStarted","Data":"ddaadff2159738ae07a5e0b9679af6ad9973d6cbf7bb0e373f3b04df6c7535f0"} Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.901184 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.947553 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.958823 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.979387 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.980767 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.984973 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.985170 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 06 13:56:54 crc kubenswrapper[4757]: I1006 13:56:54.986533 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.164221 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.164745 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-logs\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.164862 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.164917 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q8l4\" (UniqueName: \"kubernetes.io/projected/cc62b42c-2672-4413-9768-4949a52c7659-kube-api-access-7q8l4\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.165501 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.165541 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.165563 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.165595 4757 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.267278 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.267619 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q8l4\" (UniqueName: \"kubernetes.io/projected/cc62b42c-2672-4413-9768-4949a52c7659-kube-api-access-7q8l4\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.267670 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.267691 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.267710 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.267732 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.267774 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.267790 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-logs\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.268170 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.268318 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.269036 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-logs\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.273431 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.275055 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.276346 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.288740 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.290183 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q8l4\" (UniqueName: \"kubernetes.io/projected/cc62b42c-2672-4413-9768-4949a52c7659-kube-api-access-7q8l4\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.295982 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.305910 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.627291 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.776121 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-sb\") pod \"71d18dba-d344-4779-9b18-cecbe20b22eb\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.778530 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-nb\") pod \"71d18dba-d344-4779-9b18-cecbe20b22eb\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.778951 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69x4d\" (UniqueName: \"kubernetes.io/projected/71d18dba-d344-4779-9b18-cecbe20b22eb-kube-api-access-69x4d\") pod \"71d18dba-d344-4779-9b18-cecbe20b22eb\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.778986 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-config\") pod \"71d18dba-d344-4779-9b18-cecbe20b22eb\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.779008 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-svc\") pod \"71d18dba-d344-4779-9b18-cecbe20b22eb\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.779043 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-swift-storage-0\") pod \"71d18dba-d344-4779-9b18-cecbe20b22eb\" (UID: \"71d18dba-d344-4779-9b18-cecbe20b22eb\") " Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.782933 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71d18dba-d344-4779-9b18-cecbe20b22eb-kube-api-access-69x4d" (OuterVolumeSpecName: "kube-api-access-69x4d") pod "71d18dba-d344-4779-9b18-cecbe20b22eb" (UID: "71d18dba-d344-4779-9b18-cecbe20b22eb"). InnerVolumeSpecName "kube-api-access-69x4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.828416 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "71d18dba-d344-4779-9b18-cecbe20b22eb" (UID: "71d18dba-d344-4779-9b18-cecbe20b22eb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.833318 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "71d18dba-d344-4779-9b18-cecbe20b22eb" (UID: "71d18dba-d344-4779-9b18-cecbe20b22eb"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.836911 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "71d18dba-d344-4779-9b18-cecbe20b22eb" (UID: "71d18dba-d344-4779-9b18-cecbe20b22eb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.851480 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "71d18dba-d344-4779-9b18-cecbe20b22eb" (UID: "71d18dba-d344-4779-9b18-cecbe20b22eb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.858040 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-config" (OuterVolumeSpecName: "config") pod "71d18dba-d344-4779-9b18-cecbe20b22eb" (UID: "71d18dba-d344-4779-9b18-cecbe20b22eb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.881371 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.881419 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69x4d\" (UniqueName: \"kubernetes.io/projected/71d18dba-d344-4779-9b18-cecbe20b22eb-kube-api-access-69x4d\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.881434 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.881446 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.881460 4757 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.881470 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/71d18dba-d344-4779-9b18-cecbe20b22eb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.917676 4757 generic.go:334] "Generic (PLEG): container finished" podID="1ced3006-a395-423b-8429-f35beb1398b0" containerID="261f6b4aa7ccd2c620651f90574d988ef7b35bce2e0e2eb1b3acfa5a31d3ae4a" exitCode=0 Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.917786 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kssfb" event={"ID":"1ced3006-a395-423b-8429-f35beb1398b0","Type":"ContainerDied","Data":"261f6b4aa7ccd2c620651f90574d988ef7b35bce2e0e2eb1b3acfa5a31d3ae4a"} Oct 06 13:56:55 
crc kubenswrapper[4757]: I1006 13:56:55.930496 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v77kw" event={"ID":"76cdc407-bb9a-44dc-82eb-c9b083395c8d","Type":"ContainerStarted","Data":"7f6ec78624b0739a9256eb6ee704e52cd0322c374b53cffbaf675ecadc0c2437"} Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.935539 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" event={"ID":"71d18dba-d344-4779-9b18-cecbe20b22eb","Type":"ContainerDied","Data":"a5efeb65cad4fc6e4765880259b7d437beb6f49fe28587d0e23fca2854f490dd"} Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.935699 4757 scope.go:117] "RemoveContainer" containerID="5606e8efd9a2437fd5572db5e168149262d1347c44f3fb369868f75a0a20065a" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.938696 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fb487d997-ctdfh" Oct 06 13:56:55 crc kubenswrapper[4757]: I1006 13:56:55.998021 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-v77kw" podStartSLOduration=2.998002346 podStartE2EDuration="2.998002346s" podCreationTimestamp="2025-10-06 13:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:55.9706871 +0000 UTC m=+1104.468005637" watchObservedRunningTime="2025-10-06 13:56:55.998002346 +0000 UTC m=+1104.495320883" Oct 06 13:56:56 crc kubenswrapper[4757]: I1006 13:56:56.005707 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fb487d997-ctdfh"] Oct 06 13:56:56 crc kubenswrapper[4757]: I1006 13:56:56.012254 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fb487d997-ctdfh"] Oct 06 13:56:56 crc kubenswrapper[4757]: I1006 13:56:56.194753 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="554a44d5-55f5-48d0-83d2-e82015b807e8" path="/var/lib/kubelet/pods/554a44d5-55f5-48d0-83d2-e82015b807e8/volumes" Oct 06 13:56:56 crc kubenswrapper[4757]: I1006 13:56:56.195146 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71d18dba-d344-4779-9b18-cecbe20b22eb" path="/var/lib/kubelet/pods/71d18dba-d344-4779-9b18-cecbe20b22eb/volumes" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.162070 4757 scope.go:117] "RemoveContainer" containerID="e8441672de141ceee66a56624b66a6758d58f2d762b07f2d4ef4caa54965e974" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.386233 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-kssfb" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.432224 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-config-data\") pod \"1ced3006-a395-423b-8429-f35beb1398b0\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.432644 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-scripts\") pod \"1ced3006-a395-423b-8429-f35beb1398b0\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.432874 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-combined-ca-bundle\") pod \"1ced3006-a395-423b-8429-f35beb1398b0\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.433004 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ced3006-a395-423b-8429-f35beb1398b0-logs\") pod \"1ced3006-a395-423b-8429-f35beb1398b0\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.433070 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxjx9\" (UniqueName: \"kubernetes.io/projected/1ced3006-a395-423b-8429-f35beb1398b0-kube-api-access-hxjx9\") pod \"1ced3006-a395-423b-8429-f35beb1398b0\" (UID: \"1ced3006-a395-423b-8429-f35beb1398b0\") " Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.434291 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ced3006-a395-423b-8429-f35beb1398b0-logs" (OuterVolumeSpecName: "logs") pod "1ced3006-a395-423b-8429-f35beb1398b0" (UID: "1ced3006-a395-423b-8429-f35beb1398b0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.439152 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-scripts" (OuterVolumeSpecName: "scripts") pod "1ced3006-a395-423b-8429-f35beb1398b0" (UID: "1ced3006-a395-423b-8429-f35beb1398b0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.439838 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ced3006-a395-423b-8429-f35beb1398b0-kube-api-access-hxjx9" (OuterVolumeSpecName: "kube-api-access-hxjx9") pod "1ced3006-a395-423b-8429-f35beb1398b0" (UID: "1ced3006-a395-423b-8429-f35beb1398b0"). InnerVolumeSpecName "kube-api-access-hxjx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.483374 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ced3006-a395-423b-8429-f35beb1398b0" (UID: "1ced3006-a395-423b-8429-f35beb1398b0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.488892 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-config-data" (OuterVolumeSpecName: "config-data") pod "1ced3006-a395-423b-8429-f35beb1398b0" (UID: "1ced3006-a395-423b-8429-f35beb1398b0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.535562 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.535612 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ced3006-a395-423b-8429-f35beb1398b0-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.535626 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxjx9\" (UniqueName: \"kubernetes.io/projected/1ced3006-a395-423b-8429-f35beb1398b0-kube-api-access-hxjx9\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.535637 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.535651 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ced3006-a395-423b-8429-f35beb1398b0-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.761049 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.982654 4757 generic.go:334] "Generic (PLEG): container finished" podID="76cdc407-bb9a-44dc-82eb-c9b083395c8d" containerID="7f6ec78624b0739a9256eb6ee704e52cd0322c374b53cffbaf675ecadc0c2437" exitCode=0 Oct 06 13:56:58 crc kubenswrapper[4757]: I1006 13:56:58.982873 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v77kw" event={"ID":"76cdc407-bb9a-44dc-82eb-c9b083395c8d","Type":"ContainerDied","Data":"7f6ec78624b0739a9256eb6ee704e52cd0322c374b53cffbaf675ecadc0c2437"} Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.001163 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162","Type":"ContainerStarted","Data":"570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289"} Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.051752 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-kssfb" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.055008 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-kssfb" event={"ID":"1ced3006-a395-423b-8429-f35beb1398b0","Type":"ContainerDied","Data":"a9347dd93b14823e0ac46669fe95b8a0ac866c8a3247b62776c8ca625c7b18bb"} Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.055044 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9347dd93b14823e0ac46669fe95b8a0ac866c8a3247b62776c8ca625c7b18bb" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.058431 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6znnt" event={"ID":"3bf60902-663f-4bbc-8415-691e6519a557","Type":"ContainerStarted","Data":"e4fa44361d718502c0ccce9f909093402d4d992d6d6aa62c7fa2758b3a57bf2f"} Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.066769 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.066743778 podStartE2EDuration="7.066743778s" podCreationTimestamp="2025-10-06 13:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:56:59.040639128 +0000 UTC m=+1107.537957665" watchObservedRunningTime="2025-10-06 13:56:59.066743778 +0000 UTC m=+1107.564062315" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.068353 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cc62b42c-2672-4413-9768-4949a52c7659","Type":"ContainerStarted","Data":"bea410e00f7dc5385089ba4ccbe81bf22c7880a228a73451cb880f197dfd5e95"} Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.074998 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerStarted","Data":"8b9fd66a10d9bbb98b57c188dcf86b05a757eeddad190a593c9d44c2af604c7b"} Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.080218 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-6znnt" podStartSLOduration=5.66164473 podStartE2EDuration="12.080202631s" podCreationTimestamp="2025-10-06 13:56:47 +0000 UTC" firstStartedPulling="2025-10-06 13:56:51.865182863 +0000 UTC m=+1100.362501421" lastFinishedPulling="2025-10-06 13:56:58.283740785 +0000 UTC m=+1106.781059322" observedRunningTime="2025-10-06 13:56:59.079478318 +0000 UTC m=+1107.576796855" watchObservedRunningTime="2025-10-06 13:56:59.080202631 +0000 UTC m=+1107.577521168" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.483404 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-ff5468974-c5722"] Oct 06 13:56:59 crc kubenswrapper[4757]: E1006 13:56:59.484147 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ced3006-a395-423b-8429-f35beb1398b0" containerName="placement-db-sync" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.484164 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ced3006-a395-423b-8429-f35beb1398b0" containerName="placement-db-sync" Oct 06 13:56:59 crc kubenswrapper[4757]: E1006 13:56:59.484204 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71d18dba-d344-4779-9b18-cecbe20b22eb" containerName="init" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.484213 4757 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="71d18dba-d344-4779-9b18-cecbe20b22eb" containerName="init" Oct 06 13:56:59 crc kubenswrapper[4757]: E1006 13:56:59.484234 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71d18dba-d344-4779-9b18-cecbe20b22eb" containerName="dnsmasq-dns" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.484367 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="71d18dba-d344-4779-9b18-cecbe20b22eb" containerName="dnsmasq-dns" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.485473 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ced3006-a395-423b-8429-f35beb1398b0" containerName="placement-db-sync" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.485495 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="71d18dba-d344-4779-9b18-cecbe20b22eb" containerName="dnsmasq-dns" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.487102 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.500357 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.501028 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.501237 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-bp2vn" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.501363 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.501450 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.511777 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ff5468974-c5722"] Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.657268 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-public-tls-certs\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.657338 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcj28\" (UniqueName: \"kubernetes.io/projected/afc51d15-69dd-4900-886c-29a4f372df24-kube-api-access-tcj28\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.657369 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-combined-ca-bundle\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.657435 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-config-data\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.657515 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc51d15-69dd-4900-886c-29a4f372df24-logs\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.657585 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-scripts\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.657608 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-internal-tls-certs\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.758806 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-config-data\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.758870 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc51d15-69dd-4900-886c-29a4f372df24-logs\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.758929 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-scripts\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.758952 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-internal-tls-certs\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.759013 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-public-tls-certs\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.759062 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcj28\" (UniqueName: \"kubernetes.io/projected/afc51d15-69dd-4900-886c-29a4f372df24-kube-api-access-tcj28\") pod \"placement-ff5468974-c5722\" 
(UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.759118 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-combined-ca-bundle\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.759565 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc51d15-69dd-4900-886c-29a4f372df24-logs\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.763944 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-scripts\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.764640 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-config-data\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.765387 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-internal-tls-certs\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.765415 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-combined-ca-bundle\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.769865 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-public-tls-certs\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.780248 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcj28\" (UniqueName: \"kubernetes.io/projected/afc51d15-69dd-4900-886c-29a4f372df24-kube-api-access-tcj28\") pod \"placement-ff5468974-c5722\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " pod="openstack/placement-ff5468974-c5722" Oct 06 13:56:59 crc kubenswrapper[4757]: I1006 13:56:59.849562 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-ff5468974-c5722" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.092899 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cc62b42c-2672-4413-9768-4949a52c7659","Type":"ContainerStarted","Data":"fe001ee38925cca5382d9fe049d7e9db56dc87cbbb2fcf7ea31b331f5f614f78"} Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.356483 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ff5468974-c5722"] Oct 06 13:57:00 crc kubenswrapper[4757]: W1006 13:57:00.361643 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafc51d15_69dd_4900_886c_29a4f372df24.slice/crio-273f3d77d426b220594a446856b22413f4c98ec6785cba3d65d9c9dcbc388906 WatchSource:0}: Error finding container 273f3d77d426b220594a446856b22413f4c98ec6785cba3d65d9c9dcbc388906: Status 404 returned error can't find the container with id 273f3d77d426b220594a446856b22413f4c98ec6785cba3d65d9c9dcbc388906 Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.505359 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.683270 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-fernet-keys\") pod \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.683818 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9bt6\" (UniqueName: \"kubernetes.io/projected/76cdc407-bb9a-44dc-82eb-c9b083395c8d-kube-api-access-s9bt6\") pod \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.683916 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-scripts\") pod \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.683961 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-credential-keys\") pod \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.684009 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-combined-ca-bundle\") pod \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.684120 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-config-data\") pod \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\" (UID: \"76cdc407-bb9a-44dc-82eb-c9b083395c8d\") " Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.689621 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "76cdc407-bb9a-44dc-82eb-c9b083395c8d" (UID: "76cdc407-bb9a-44dc-82eb-c9b083395c8d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.694155 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-scripts" (OuterVolumeSpecName: "scripts") pod "76cdc407-bb9a-44dc-82eb-c9b083395c8d" (UID: "76cdc407-bb9a-44dc-82eb-c9b083395c8d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.694406 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76cdc407-bb9a-44dc-82eb-c9b083395c8d-kube-api-access-s9bt6" (OuterVolumeSpecName: "kube-api-access-s9bt6") pod "76cdc407-bb9a-44dc-82eb-c9b083395c8d" (UID: "76cdc407-bb9a-44dc-82eb-c9b083395c8d"). InnerVolumeSpecName "kube-api-access-s9bt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.694958 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "76cdc407-bb9a-44dc-82eb-c9b083395c8d" (UID: "76cdc407-bb9a-44dc-82eb-c9b083395c8d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.713899 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-config-data" (OuterVolumeSpecName: "config-data") pod "76cdc407-bb9a-44dc-82eb-c9b083395c8d" (UID: "76cdc407-bb9a-44dc-82eb-c9b083395c8d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.725669 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76cdc407-bb9a-44dc-82eb-c9b083395c8d" (UID: "76cdc407-bb9a-44dc-82eb-c9b083395c8d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.786654 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.786695 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.786706 4757 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.786718 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9bt6\" (UniqueName: \"kubernetes.io/projected/76cdc407-bb9a-44dc-82eb-c9b083395c8d-kube-api-access-s9bt6\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.786731 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:00 crc kubenswrapper[4757]: I1006 13:57:00.786742 4757 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/76cdc407-bb9a-44dc-82eb-c9b083395c8d-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.087677 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-678769d845-d782m"] Oct 06 13:57:01 crc kubenswrapper[4757]: E1006 13:57:01.088182 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76cdc407-bb9a-44dc-82eb-c9b083395c8d" containerName="keystone-bootstrap" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.088208 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="76cdc407-bb9a-44dc-82eb-c9b083395c8d" containerName="keystone-bootstrap" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.088462 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="76cdc407-bb9a-44dc-82eb-c9b083395c8d" containerName="keystone-bootstrap" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.089263 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.091815 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.091908 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.107164 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-678769d845-d782m"] Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.141045 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cc62b42c-2672-4413-9768-4949a52c7659","Type":"ContainerStarted","Data":"3ef57524bb68d3a21d10583387c6e1165591d0cb469902192d8fe145b8a0a9db"} Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.145485 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-v77kw" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.147260 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-v77kw" event={"ID":"76cdc407-bb9a-44dc-82eb-c9b083395c8d","Type":"ContainerDied","Data":"ef5fc8e1220fa912fb9cb4914f707483933fe1776f2a4225976e2ab361b1ce5a"} Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.147295 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef5fc8e1220fa912fb9cb4914f707483933fe1776f2a4225976e2ab361b1ce5a" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.157201 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ff5468974-c5722" event={"ID":"afc51d15-69dd-4900-886c-29a4f372df24","Type":"ContainerStarted","Data":"273f3d77d426b220594a446856b22413f4c98ec6785cba3d65d9c9dcbc388906"} Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.194292 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-config-data\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.194339 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfbzb\" (UniqueName: \"kubernetes.io/projected/80bdd62a-4024-4734-9ca0-a97f2bae29c3-kube-api-access-rfbzb\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.194369 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-internal-tls-certs\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.194447 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-combined-ca-bundle\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.194514 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-credential-keys\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.194544 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-scripts\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.194626 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-fernet-keys\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.194690 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-public-tls-certs\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.295724 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-config-data\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.295769 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfbzb\" (UniqueName: \"kubernetes.io/projected/80bdd62a-4024-4734-9ca0-a97f2bae29c3-kube-api-access-rfbzb\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.295787 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-internal-tls-certs\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.295822 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-combined-ca-bundle\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.295855 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-credential-keys\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.295884 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-scripts\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.295915 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-fernet-keys\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.295973 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-public-tls-certs\") pod 
\"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.299934 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-internal-tls-certs\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.300035 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-config-data\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.300227 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-credential-keys\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.300347 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-public-tls-certs\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.301806 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-scripts\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.302195 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-fernet-keys\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.304533 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-combined-ca-bundle\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.311052 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfbzb\" (UniqueName: \"kubernetes.io/projected/80bdd62a-4024-4734-9ca0-a97f2bae29c3-kube-api-access-rfbzb\") pod \"keystone-678769d845-d782m\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:01 crc kubenswrapper[4757]: I1006 13:57:01.423952 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:02 crc kubenswrapper[4757]: I1006 13:57:02.176881 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ff5468974-c5722" event={"ID":"afc51d15-69dd-4900-886c-29a4f372df24","Type":"ContainerStarted","Data":"832893f98af9abff959fc064ee7dc85ca2245d5e8f5d1e911ffd6cf81dcf776b"} Oct 06 13:57:02 crc kubenswrapper[4757]: I1006 13:57:02.230691 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.230671928 podStartE2EDuration="8.230671928s" podCreationTimestamp="2025-10-06 13:56:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:02.204851836 +0000 UTC m=+1110.702170383" watchObservedRunningTime="2025-10-06 13:57:02.230671928 +0000 UTC m=+1110.727990475" Oct 06 13:57:02 crc kubenswrapper[4757]: I1006 13:57:02.237929 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-678769d845-d782m"] Oct 06 13:57:03 crc kubenswrapper[4757]: I1006 13:57:03.186780 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-678769d845-d782m" event={"ID":"80bdd62a-4024-4734-9ca0-a97f2bae29c3","Type":"ContainerStarted","Data":"09aeacef459637a0281dce4676bdc54627b7826e6ecae435591441cd59a1f043"} Oct 06 13:57:03 crc kubenswrapper[4757]: I1006 13:57:03.371725 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 06 13:57:03 crc kubenswrapper[4757]: I1006 13:57:03.372129 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 06 13:57:03 crc kubenswrapper[4757]: I1006 13:57:03.405196 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 06 13:57:03 crc kubenswrapper[4757]: I1006 13:57:03.416781 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 06 13:57:04 crc kubenswrapper[4757]: I1006 13:57:04.198041 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 06 13:57:04 crc kubenswrapper[4757]: I1006 13:57:04.198107 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 06 13:57:04 crc kubenswrapper[4757]: I1006 13:57:04.361234 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:57:04 crc kubenswrapper[4757]: I1006 13:57:04.361290 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:57:05 crc kubenswrapper[4757]: I1006 13:57:05.307467 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 06 13:57:05 crc kubenswrapper[4757]: I1006 13:57:05.307880 4757 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 06 13:57:05 crc kubenswrapper[4757]: I1006 13:57:05.352372 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 06 13:57:05 crc kubenswrapper[4757]: I1006 13:57:05.352482 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 06 13:57:06 crc kubenswrapper[4757]: I1006 13:57:06.217352 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 06 13:57:06 crc kubenswrapper[4757]: I1006 13:57:06.231566 4757 generic.go:334] "Generic (PLEG): container finished" podID="3bf60902-663f-4bbc-8415-691e6519a557" containerID="e4fa44361d718502c0ccce9f909093402d4d992d6d6aa62c7fa2758b3a57bf2f" exitCode=0 Oct 06 13:57:06 crc kubenswrapper[4757]: I1006 13:57:06.231679 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6znnt" event={"ID":"3bf60902-663f-4bbc-8415-691e6519a557","Type":"ContainerDied","Data":"e4fa44361d718502c0ccce9f909093402d4d992d6d6aa62c7fa2758b3a57bf2f"} Oct 06 13:57:06 crc kubenswrapper[4757]: I1006 13:57:06.231815 4757 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 06 13:57:06 crc kubenswrapper[4757]: I1006 13:57:06.232278 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 06 13:57:06 crc kubenswrapper[4757]: I1006 13:57:06.232321 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 06 13:57:06 crc kubenswrapper[4757]: I1006 13:57:06.266440 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 06 13:57:08 crc kubenswrapper[4757]: I1006 13:57:08.246347 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 06 13:57:08 crc kubenswrapper[4757]: I1006 13:57:08.247572 4757 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 06 13:57:08 crc kubenswrapper[4757]: I1006 13:57:08.304524 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.294707 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6znnt" event={"ID":"3bf60902-663f-4bbc-8415-691e6519a557","Type":"ContainerDied","Data":"2c169df601c4a575e6fc745352a6a4857c4c0a067b07d09f5708ffaa4432af75"} Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.295484 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c169df601c4a575e6fc745352a6a4857c4c0a067b07d09f5708ffaa4432af75" Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.329534 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-6znnt" Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.404680 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cm2jp\" (UniqueName: \"kubernetes.io/projected/3bf60902-663f-4bbc-8415-691e6519a557-kube-api-access-cm2jp\") pod \"3bf60902-663f-4bbc-8415-691e6519a557\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.404811 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-db-sync-config-data\") pod \"3bf60902-663f-4bbc-8415-691e6519a557\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.404889 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-combined-ca-bundle\") pod \"3bf60902-663f-4bbc-8415-691e6519a557\" (UID: \"3bf60902-663f-4bbc-8415-691e6519a557\") " Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.421924 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bf60902-663f-4bbc-8415-691e6519a557-kube-api-access-cm2jp" (OuterVolumeSpecName: "kube-api-access-cm2jp") pod "3bf60902-663f-4bbc-8415-691e6519a557" (UID: "3bf60902-663f-4bbc-8415-691e6519a557"). InnerVolumeSpecName "kube-api-access-cm2jp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.423430 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3bf60902-663f-4bbc-8415-691e6519a557" (UID: "3bf60902-663f-4bbc-8415-691e6519a557"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.432910 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3bf60902-663f-4bbc-8415-691e6519a557" (UID: "3bf60902-663f-4bbc-8415-691e6519a557"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.506537 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cm2jp\" (UniqueName: \"kubernetes.io/projected/3bf60902-663f-4bbc-8415-691e6519a557-kube-api-access-cm2jp\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.506567 4757 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:13 crc kubenswrapper[4757]: I1006 13:57:13.506576 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bf60902-663f-4bbc-8415-691e6519a557-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.303315 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-6znnt" Oct 06 13:57:14 crc kubenswrapper[4757]: E1006 13:57:14.571415 4757 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b70574d6bbc2c83ca6de85f5da5bde65841ed526cc8b14199810218feca0f18b" Oct 06 13:57:14 crc kubenswrapper[4757]: E1006 13:57:14.571898 4757 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b70574d6bbc2c83ca6de85f5da5bde65841ed526cc8b14199810218feca0f18b,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fj4s7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-4nqxd_openstack(f705f558-519e-489e-8ff4-5b3eb4476eff): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 06 13:57:14 crc kubenswrapper[4757]: E1006 13:57:14.573115 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-4nqxd" podUID="f705f558-519e-489e-8ff4-5b3eb4476eff" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.623832 
4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-996ccd7c9-wv7n4"] Oct 06 13:57:14 crc kubenswrapper[4757]: E1006 13:57:14.624262 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bf60902-663f-4bbc-8415-691e6519a557" containerName="barbican-db-sync" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.624278 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bf60902-663f-4bbc-8415-691e6519a557" containerName="barbican-db-sync" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.624475 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bf60902-663f-4bbc-8415-691e6519a557" containerName="barbican-db-sync" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.633678 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.637494 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.637689 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.639016 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6f4c564f58-t9kxj"] Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.641427 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-kms2q" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.642204 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.644560 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.654262 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-996ccd7c9-wv7n4"] Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.693027 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6f4c564f58-t9kxj"] Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728107 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-combined-ca-bundle\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728156 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985540de-3212-41f4-a3a6-180ff5c4eda2-logs\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728202 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bn598\" (UniqueName: \"kubernetes.io/projected/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-kube-api-access-bn598\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " 
pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728259 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728272 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-logs\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728312 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data-custom\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728346 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728373 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-combined-ca-bundle\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728389 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5zw9\" (UniqueName: \"kubernetes.io/projected/985540de-3212-41f4-a3a6-180ff5c4eda2-kube-api-access-l5zw9\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.728425 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data-custom\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.758005 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84f6d7bf7f-57wpq"] Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.759388 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.798620 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84f6d7bf7f-57wpq"] Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.830394 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data-custom\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.830824 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqmbm\" (UniqueName: \"kubernetes.io/projected/3cc03073-fe96-41a7-9b32-ad87247ed9b8-kube-api-access-fqmbm\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.830861 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.830898 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-swift-storage-0\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.830917 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-config\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.830935 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-combined-ca-bundle\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.830958 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5zw9\" (UniqueName: \"kubernetes.io/projected/985540de-3212-41f4-a3a6-180ff5c4eda2-kube-api-access-l5zw9\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.830992 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data-custom\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc 
kubenswrapper[4757]: I1006 13:57:14.831036 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-combined-ca-bundle\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.831063 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985540de-3212-41f4-a3a6-180ff5c4eda2-logs\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.831083 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-svc\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.831124 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-sb\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.831161 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bn598\" (UniqueName: \"kubernetes.io/projected/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-kube-api-access-bn598\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.831210 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-logs\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.831227 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.831250 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-nb\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.836673 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985540de-3212-41f4-a3a6-180ff5c4eda2-logs\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " 
pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.840256 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-combined-ca-bundle\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.842258 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data-custom\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.842532 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.846955 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data-custom\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.853887 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-logs\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.857922 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5zw9\" (UniqueName: \"kubernetes.io/projected/985540de-3212-41f4-a3a6-180ff5c4eda2-kube-api-access-l5zw9\") pod \"barbican-keystone-listener-6f4c564f58-t9kxj\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.864804 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-combined-ca-bundle\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.867247 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bn598\" (UniqueName: \"kubernetes.io/projected/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-kube-api-access-bn598\") pod \"barbican-worker-996ccd7c9-wv7n4\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.873933 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data\") pod \"barbican-worker-996ccd7c9-wv7n4\" 
(UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.919300 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-646975b848-hkqzh"] Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.920808 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.923156 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.932451 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-nb\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.932870 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4knrl\" (UniqueName: \"kubernetes.io/projected/fdb7d73e-b6bf-4276-8633-45746b12cc1b-kube-api-access-4knrl\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.932975 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqmbm\" (UniqueName: \"kubernetes.io/projected/3cc03073-fe96-41a7-9b32-ad87247ed9b8-kube-api-access-fqmbm\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.933054 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.933267 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-swift-storage-0\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.933354 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdb7d73e-b6bf-4276-8633-45746b12cc1b-logs\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.933421 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-config\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.933500 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-combined-ca-bundle\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.933632 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data-custom\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.933727 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-svc\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.933918 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-sb\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.934897 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-sb\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.935547 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-nb\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.937087 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-swift-storage-0\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.937758 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-config\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.937641 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-svc\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.944122 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-646975b848-hkqzh"] Oct 06 13:57:14 crc kubenswrapper[4757]: I1006 13:57:14.964177 
4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqmbm\" (UniqueName: \"kubernetes.io/projected/3cc03073-fe96-41a7-9b32-ad87247ed9b8-kube-api-access-fqmbm\") pod \"dnsmasq-dns-84f6d7bf7f-57wpq\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.035161 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data-custom\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.035281 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4knrl\" (UniqueName: \"kubernetes.io/projected/fdb7d73e-b6bf-4276-8633-45746b12cc1b-kube-api-access-4knrl\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.035320 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.035343 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdb7d73e-b6bf-4276-8633-45746b12cc1b-logs\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.035365 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-combined-ca-bundle\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.036069 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdb7d73e-b6bf-4276-8633-45746b12cc1b-logs\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.038886 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data-custom\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.038905 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-combined-ca-bundle\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.040489 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.054526 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.058358 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.066721 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4knrl\" (UniqueName: \"kubernetes.io/projected/fdb7d73e-b6bf-4276-8633-45746b12cc1b-kube-api-access-4knrl\") pod \"barbican-api-646975b848-hkqzh\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.078209 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.257627 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.329852 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-678769d845-d782m" event={"ID":"80bdd62a-4024-4734-9ca0-a97f2bae29c3","Type":"ContainerStarted","Data":"31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab"} Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.330929 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-678769d845-d782m" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.355002 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-678769d845-d782m" podStartSLOduration=14.354981782 podStartE2EDuration="14.354981782s" podCreationTimestamp="2025-10-06 13:57:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:15.344993025 +0000 UTC m=+1123.842311562" watchObservedRunningTime="2025-10-06 13:57:15.354981782 +0000 UTC m=+1123.852300319" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.358220 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ff5468974-c5722" event={"ID":"afc51d15-69dd-4900-886c-29a4f372df24","Type":"ContainerStarted","Data":"501ec0020527b58bea34dc6aa1f5770171c8c96af98e8cc181a6f0a30690207d"} Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.358956 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-ff5468974-c5722" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.359272 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-ff5468974-c5722" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.375541 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerStarted","Data":"d50cf6da79f320cc02c7ebbf624e6b61f19b905001f2f9122545f8be6d6ea724"} Oct 06 13:57:15 crc kubenswrapper[4757]: E1006 13:57:15.385966 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b70574d6bbc2c83ca6de85f5da5bde65841ed526cc8b14199810218feca0f18b\\\"\"" pod="openstack/cinder-db-sync-4nqxd" podUID="f705f558-519e-489e-8ff4-5b3eb4476eff" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.393846 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-ff5468974-c5722" podStartSLOduration=16.393826612 podStartE2EDuration="16.393826612s" podCreationTimestamp="2025-10-06 13:56:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:15.386776216 +0000 UTC m=+1123.884094763" watchObservedRunningTime="2025-10-06 13:57:15.393826612 +0000 UTC m=+1123.891145149" Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.584134 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-996ccd7c9-wv7n4"] Oct 06 13:57:15 crc kubenswrapper[4757]: W1006 13:57:15.585642 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e6333c1_01c0_42fd_a75f_31a2c57e9db2.slice/crio-c351b5e79eb368979f987d102b3982934f0a96d05220650140c53c997e64bc49 WatchSource:0}: Error finding container c351b5e79eb368979f987d102b3982934f0a96d05220650140c53c997e64bc49: Status 404 returned error can't find the container with id c351b5e79eb368979f987d102b3982934f0a96d05220650140c53c997e64bc49 Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.695712 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84f6d7bf7f-57wpq"] Oct 06 13:57:15 crc kubenswrapper[4757]: W1006 13:57:15.703205 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3cc03073_fe96_41a7_9b32_ad87247ed9b8.slice/crio-515b0eb200fc7341a864d080e4a735913eee30f31fa16815477cf5bfa0d8b97e WatchSource:0}: Error finding container 515b0eb200fc7341a864d080e4a735913eee30f31fa16815477cf5bfa0d8b97e: Status 404 returned error can't find the container with id 515b0eb200fc7341a864d080e4a735913eee30f31fa16815477cf5bfa0d8b97e Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.717234 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6f4c564f58-t9kxj"] Oct 06 13:57:15 crc kubenswrapper[4757]: W1006 13:57:15.841134 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfdb7d73e_b6bf_4276_8633_45746b12cc1b.slice/crio-f57026646b348655af0eb904c64c7f195c451c06c5a9de0d288719a738815953 WatchSource:0}: Error finding container f57026646b348655af0eb904c64c7f195c451c06c5a9de0d288719a738815953: Status 404 returned error can't find the container with id f57026646b348655af0eb904c64c7f195c451c06c5a9de0d288719a738815953 Oct 06 13:57:15 crc kubenswrapper[4757]: I1006 13:57:15.842075 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-646975b848-hkqzh"] Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.396310 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-996ccd7c9-wv7n4" event={"ID":"3e6333c1-01c0-42fd-a75f-31a2c57e9db2","Type":"ContainerStarted","Data":"c351b5e79eb368979f987d102b3982934f0a96d05220650140c53c997e64bc49"} Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 
13:57:16.398251 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" event={"ID":"985540de-3212-41f4-a3a6-180ff5c4eda2","Type":"ContainerStarted","Data":"50035a0bd6893b397a6cb41f3bd4c88d6dca2b389500cf6b52e4bf691af6e8bd"} Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.402793 4757 generic.go:334] "Generic (PLEG): container finished" podID="3cc03073-fe96-41a7-9b32-ad87247ed9b8" containerID="97dd337fe9b2389d0d437a588f41fe4346726d70f9efb19e303cfd124065fa80" exitCode=0 Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.402893 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" event={"ID":"3cc03073-fe96-41a7-9b32-ad87247ed9b8","Type":"ContainerDied","Data":"97dd337fe9b2389d0d437a588f41fe4346726d70f9efb19e303cfd124065fa80"} Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.402925 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" event={"ID":"3cc03073-fe96-41a7-9b32-ad87247ed9b8","Type":"ContainerStarted","Data":"515b0eb200fc7341a864d080e4a735913eee30f31fa16815477cf5bfa0d8b97e"} Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.413081 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-646975b848-hkqzh" event={"ID":"fdb7d73e-b6bf-4276-8633-45746b12cc1b","Type":"ContainerStarted","Data":"c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572"} Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.413140 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-646975b848-hkqzh" event={"ID":"fdb7d73e-b6bf-4276-8633-45746b12cc1b","Type":"ContainerStarted","Data":"a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae"} Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.413151 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-646975b848-hkqzh" event={"ID":"fdb7d73e-b6bf-4276-8633-45746b12cc1b","Type":"ContainerStarted","Data":"f57026646b348655af0eb904c64c7f195c451c06c5a9de0d288719a738815953"} Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.413209 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.413920 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.467463 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-646975b848-hkqzh" podStartSLOduration=2.467438209 podStartE2EDuration="2.467438209s" podCreationTimestamp="2025-10-06 13:57:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:16.457485945 +0000 UTC m=+1124.954804482" watchObservedRunningTime="2025-10-06 13:57:16.467438209 +0000 UTC m=+1124.964756746" Oct 06 13:57:16 crc kubenswrapper[4757]: I1006 13:57:16.713661 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-ff5468974-c5722" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.421522 4757 generic.go:334] "Generic (PLEG): container finished" podID="434206fd-9f64-4c9d-a528-55d9361dad92" containerID="aa3678cfeaf79fb26c4c0f487ec077377631d000fb2a222d822e9b0d65cb8401" exitCode=0 Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.421746 4757 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kjpw4" event={"ID":"434206fd-9f64-4c9d-a528-55d9361dad92","Type":"ContainerDied","Data":"aa3678cfeaf79fb26c4c0f487ec077377631d000fb2a222d822e9b0d65cb8401"} Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.425804 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" event={"ID":"3cc03073-fe96-41a7-9b32-ad87247ed9b8","Type":"ContainerStarted","Data":"79083e7a545ad51132722ba67b692439220b962aba079110022d05f672e5fff2"} Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.426579 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.617248 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" podStartSLOduration=3.617227512 podStartE2EDuration="3.617227512s" podCreationTimestamp="2025-10-06 13:57:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:17.475291772 +0000 UTC m=+1125.972610329" watchObservedRunningTime="2025-10-06 13:57:17.617227512 +0000 UTC m=+1126.114546049" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.629368 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5c78d5595b-89h8z"] Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.635770 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.644308 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.644424 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.672229 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c78d5595b-89h8z"] Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.794682 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-internal-tls-certs\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.794786 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data-custom\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.794924 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-combined-ca-bundle\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.795128 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-public-tls-certs\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.795149 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.795200 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96c87\" (UniqueName: \"kubernetes.io/projected/ac42434c-8367-4cf2-9134-2d85444f90f4-kube-api-access-96c87\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.795296 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac42434c-8367-4cf2-9134-2d85444f90f4-logs\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.897309 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-public-tls-certs\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.897385 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.897460 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96c87\" (UniqueName: \"kubernetes.io/projected/ac42434c-8367-4cf2-9134-2d85444f90f4-kube-api-access-96c87\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.897571 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac42434c-8367-4cf2-9134-2d85444f90f4-logs\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.897644 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-internal-tls-certs\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.897699 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data-custom\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.897797 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-combined-ca-bundle\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.898846 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac42434c-8367-4cf2-9134-2d85444f90f4-logs\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.912852 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-combined-ca-bundle\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.913381 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.915442 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96c87\" (UniqueName: \"kubernetes.io/projected/ac42434c-8367-4cf2-9134-2d85444f90f4-kube-api-access-96c87\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.924480 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-internal-tls-certs\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.925189 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data-custom\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.925471 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-public-tls-certs\") pod \"barbican-api-5c78d5595b-89h8z\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:17 crc kubenswrapper[4757]: I1006 13:57:17.957701 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 13:57:18 crc kubenswrapper[4757]: I1006 13:57:18.759771 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kjpw4" Oct 06 13:57:18 crc kubenswrapper[4757]: I1006 13:57:18.915466 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hscps\" (UniqueName: \"kubernetes.io/projected/434206fd-9f64-4c9d-a528-55d9361dad92-kube-api-access-hscps\") pod \"434206fd-9f64-4c9d-a528-55d9361dad92\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " Oct 06 13:57:18 crc kubenswrapper[4757]: I1006 13:57:18.915526 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-config\") pod \"434206fd-9f64-4c9d-a528-55d9361dad92\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " Oct 06 13:57:18 crc kubenswrapper[4757]: I1006 13:57:18.915713 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-combined-ca-bundle\") pod \"434206fd-9f64-4c9d-a528-55d9361dad92\" (UID: \"434206fd-9f64-4c9d-a528-55d9361dad92\") " Oct 06 13:57:18 crc kubenswrapper[4757]: I1006 13:57:18.926562 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/434206fd-9f64-4c9d-a528-55d9361dad92-kube-api-access-hscps" (OuterVolumeSpecName: "kube-api-access-hscps") pod "434206fd-9f64-4c9d-a528-55d9361dad92" (UID: "434206fd-9f64-4c9d-a528-55d9361dad92"). InnerVolumeSpecName "kube-api-access-hscps". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:18 crc kubenswrapper[4757]: I1006 13:57:18.955184 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-config" (OuterVolumeSpecName: "config") pod "434206fd-9f64-4c9d-a528-55d9361dad92" (UID: "434206fd-9f64-4c9d-a528-55d9361dad92"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:18 crc kubenswrapper[4757]: I1006 13:57:18.968632 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "434206fd-9f64-4c9d-a528-55d9361dad92" (UID: "434206fd-9f64-4c9d-a528-55d9361dad92"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:18 crc kubenswrapper[4757]: I1006 13:57:18.995140 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c78d5595b-89h8z"] Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.018279 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.018319 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hscps\" (UniqueName: \"kubernetes.io/projected/434206fd-9f64-4c9d-a528-55d9361dad92-kube-api-access-hscps\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.018353 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/434206fd-9f64-4c9d-a528-55d9361dad92-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.454743 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-996ccd7c9-wv7n4" event={"ID":"3e6333c1-01c0-42fd-a75f-31a2c57e9db2","Type":"ContainerStarted","Data":"7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb"} Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.454798 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-996ccd7c9-wv7n4" event={"ID":"3e6333c1-01c0-42fd-a75f-31a2c57e9db2","Type":"ContainerStarted","Data":"b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988"} Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.473125 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kjpw4" event={"ID":"434206fd-9f64-4c9d-a528-55d9361dad92","Type":"ContainerDied","Data":"93a97935600a742251eff99e1f8f114cc37502cd986c26b545d6fae8e34bac58"} Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.473172 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93a97935600a742251eff99e1f8f114cc37502cd986c26b545d6fae8e34bac58" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.473226 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-kjpw4" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.500031 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" event={"ID":"985540de-3212-41f4-a3a6-180ff5c4eda2","Type":"ContainerStarted","Data":"103f1cb6b966566428982058c2497d82d3412e2a56a570fe97a8556bbaf90b4e"} Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.500068 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" event={"ID":"985540de-3212-41f4-a3a6-180ff5c4eda2","Type":"ContainerStarted","Data":"8657327aee1067ffca05e4e9c577e2dddc6b63823ba1e0a17d6ec6a140fc3cc0"} Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.510029 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-996ccd7c9-wv7n4" podStartSLOduration=2.630510176 podStartE2EDuration="5.51000187s" podCreationTimestamp="2025-10-06 13:57:14 +0000 UTC" firstStartedPulling="2025-10-06 13:57:15.588669532 +0000 UTC m=+1124.085988069" lastFinishedPulling="2025-10-06 13:57:18.468161226 +0000 UTC m=+1126.965479763" observedRunningTime="2025-10-06 13:57:19.480903778 +0000 UTC m=+1127.978222315" watchObservedRunningTime="2025-10-06 13:57:19.51000187 +0000 UTC m=+1128.007320407" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.526035 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" podStartSLOduration=2.7904391779999997 podStartE2EDuration="5.526016951s" podCreationTimestamp="2025-10-06 13:57:14 +0000 UTC" firstStartedPulling="2025-10-06 13:57:15.730688964 +0000 UTC m=+1124.228007501" lastFinishedPulling="2025-10-06 13:57:18.466266737 +0000 UTC m=+1126.963585274" observedRunningTime="2025-10-06 13:57:19.522511783 +0000 UTC m=+1128.019830340" watchObservedRunningTime="2025-10-06 13:57:19.526016951 +0000 UTC m=+1128.023335488" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.632737 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84f6d7bf7f-57wpq"] Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.718322 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6fc489599-7rtht"] Oct 06 13:57:19 crc kubenswrapper[4757]: E1006 13:57:19.718696 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="434206fd-9f64-4c9d-a528-55d9361dad92" containerName="neutron-db-sync" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.718707 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="434206fd-9f64-4c9d-a528-55d9361dad92" containerName="neutron-db-sync" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.718870 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="434206fd-9f64-4c9d-a528-55d9361dad92" containerName="neutron-db-sync" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.719737 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.746350 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fc489599-7rtht"] Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.838061 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-config\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.838148 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-svc\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.838168 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js9bh\" (UniqueName: \"kubernetes.io/projected/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-kube-api-access-js9bh\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.838256 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-sb\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.838275 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-nb\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.838320 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-swift-storage-0\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.874073 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-688cb45c44-gmn5j"] Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.876298 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.880259 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-4x8qm" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.880479 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.880663 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.880781 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.906122 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-688cb45c44-gmn5j"] Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943304 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv458\" (UniqueName: \"kubernetes.io/projected/2c70918c-e944-47ef-8c0c-bbf476d75a77-kube-api-access-cv458\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943378 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-config\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943492 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-combined-ca-bundle\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943577 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-sb\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943600 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-nb\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943643 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-httpd-config\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943673 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-swift-storage-0\") pod 
\"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943689 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-ovndb-tls-certs\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943743 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-config\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943763 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-svc\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.943781 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js9bh\" (UniqueName: \"kubernetes.io/projected/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-kube-api-access-js9bh\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.945245 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-config\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.947479 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-sb\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.948313 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-swift-storage-0\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.951446 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-svc\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.951510 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-nb\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 
13:57:19 crc kubenswrapper[4757]: I1006 13:57:19.967997 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js9bh\" (UniqueName: \"kubernetes.io/projected/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-kube-api-access-js9bh\") pod \"dnsmasq-dns-6fc489599-7rtht\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.046637 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv458\" (UniqueName: \"kubernetes.io/projected/2c70918c-e944-47ef-8c0c-bbf476d75a77-kube-api-access-cv458\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.046742 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-config\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.047739 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-combined-ca-bundle\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.047966 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-httpd-config\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.048008 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-ovndb-tls-certs\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.051721 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-ovndb-tls-certs\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.051747 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-config\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.051825 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-combined-ca-bundle\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.057911 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-httpd-config\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.064077 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv458\" (UniqueName: \"kubernetes.io/projected/2c70918c-e944-47ef-8c0c-bbf476d75a77-kube-api-access-cv458\") pod \"neutron-688cb45c44-gmn5j\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.066609 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.201394 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:20 crc kubenswrapper[4757]: I1006 13:57:20.508612 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" podUID="3cc03073-fe96-41a7-9b32-ad87247ed9b8" containerName="dnsmasq-dns" containerID="cri-o://79083e7a545ad51132722ba67b692439220b962aba079110022d05f672e5fff2" gracePeriod=10 Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.525804 4757 generic.go:334] "Generic (PLEG): container finished" podID="3cc03073-fe96-41a7-9b32-ad87247ed9b8" containerID="79083e7a545ad51132722ba67b692439220b962aba079110022d05f672e5fff2" exitCode=0 Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.525880 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" event={"ID":"3cc03073-fe96-41a7-9b32-ad87247ed9b8","Type":"ContainerDied","Data":"79083e7a545ad51132722ba67b692439220b962aba079110022d05f672e5fff2"} Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.868838 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.895227 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-587f5f887c-jf8v5"] Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.897149 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.909240 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.909342 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.919738 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-587f5f887c-jf8v5"] Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.982555 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sznj2\" (UniqueName: \"kubernetes.io/projected/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-kube-api-access-sznj2\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.982619 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-httpd-config\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.982751 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-ovndb-tls-certs\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.982834 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-config\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.982883 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-combined-ca-bundle\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.982926 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-internal-tls-certs\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:21 crc kubenswrapper[4757]: I1006 13:57:21.982969 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-public-tls-certs\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.084301 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-config\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.084364 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-combined-ca-bundle\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.084400 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-internal-tls-certs\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.084456 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-public-tls-certs\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.084500 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sznj2\" (UniqueName: \"kubernetes.io/projected/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-kube-api-access-sznj2\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.084540 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-httpd-config\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.084588 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-ovndb-tls-certs\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.091729 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-public-tls-certs\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.092602 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-internal-tls-certs\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.094033 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-httpd-config\") pod \"neutron-587f5f887c-jf8v5\" (UID: 
\"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.094468 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-ovndb-tls-certs\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.111228 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-config\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.113517 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sznj2\" (UniqueName: \"kubernetes.io/projected/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-kube-api-access-sznj2\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.114323 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-combined-ca-bundle\") pod \"neutron-587f5f887c-jf8v5\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") " pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:22 crc kubenswrapper[4757]: I1006 13:57:22.225222 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:23 crc kubenswrapper[4757]: W1006 13:57:23.300482 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac42434c_8367_4cf2_9134_2d85444f90f4.slice/crio-4270f7b5d7acb50511b04591cd3d9b54a3c2e3e74fb4dac7c77ba3938f308840 WatchSource:0}: Error finding container 4270f7b5d7acb50511b04591cd3d9b54a3c2e3e74fb4dac7c77ba3938f308840: Status 404 returned error can't find the container with id 4270f7b5d7acb50511b04591cd3d9b54a3c2e3e74fb4dac7c77ba3938f308840 Oct 06 13:57:23 crc kubenswrapper[4757]: I1006 13:57:23.359277 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:23 crc kubenswrapper[4757]: I1006 13:57:23.546710 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d5595b-89h8z" event={"ID":"ac42434c-8367-4cf2-9134-2d85444f90f4","Type":"ContainerStarted","Data":"4270f7b5d7acb50511b04591cd3d9b54a3c2e3e74fb4dac7c77ba3938f308840"} Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.143253 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.224276 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-sb\") pod \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.224401 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-swift-storage-0\") pod \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.224544 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqmbm\" (UniqueName: \"kubernetes.io/projected/3cc03073-fe96-41a7-9b32-ad87247ed9b8-kube-api-access-fqmbm\") pod \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.224606 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-config\") pod \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.224685 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-nb\") pod \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.224874 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-svc\") pod \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\" (UID: \"3cc03073-fe96-41a7-9b32-ad87247ed9b8\") " Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.236491 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cc03073-fe96-41a7-9b32-ad87247ed9b8-kube-api-access-fqmbm" (OuterVolumeSpecName: "kube-api-access-fqmbm") pod "3cc03073-fe96-41a7-9b32-ad87247ed9b8" (UID: "3cc03073-fe96-41a7-9b32-ad87247ed9b8"). InnerVolumeSpecName "kube-api-access-fqmbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.286752 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-config" (OuterVolumeSpecName: "config") pod "3cc03073-fe96-41a7-9b32-ad87247ed9b8" (UID: "3cc03073-fe96-41a7-9b32-ad87247ed9b8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.293511 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3cc03073-fe96-41a7-9b32-ad87247ed9b8" (UID: "3cc03073-fe96-41a7-9b32-ad87247ed9b8"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.293946 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3cc03073-fe96-41a7-9b32-ad87247ed9b8" (UID: "3cc03073-fe96-41a7-9b32-ad87247ed9b8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.302444 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3cc03073-fe96-41a7-9b32-ad87247ed9b8" (UID: "3cc03073-fe96-41a7-9b32-ad87247ed9b8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.321059 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3cc03073-fe96-41a7-9b32-ad87247ed9b8" (UID: "3cc03073-fe96-41a7-9b32-ad87247ed9b8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.327945 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqmbm\" (UniqueName: \"kubernetes.io/projected/3cc03073-fe96-41a7-9b32-ad87247ed9b8-kube-api-access-fqmbm\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.327975 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.327988 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.327999 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.328009 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.328021 4757 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3cc03073-fe96-41a7-9b32-ad87247ed9b8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.557262 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq" event={"ID":"3cc03073-fe96-41a7-9b32-ad87247ed9b8","Type":"ContainerDied","Data":"515b0eb200fc7341a864d080e4a735913eee30f31fa16815477cf5bfa0d8b97e"} Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.557308 4757 scope.go:117] "RemoveContainer" containerID="79083e7a545ad51132722ba67b692439220b962aba079110022d05f672e5fff2" Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.557499 4757 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84f6d7bf7f-57wpq"
Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.632862 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84f6d7bf7f-57wpq"]
Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.653220 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84f6d7bf7f-57wpq"]
Oct 06 13:57:24 crc kubenswrapper[4757]: I1006 13:57:24.831994 4757 scope.go:117] "RemoveContainer" containerID="97dd337fe9b2389d0d437a588f41fe4346726d70f9efb19e303cfd124065fa80"
Oct 06 13:57:25 crc kubenswrapper[4757]: I1006 13:57:25.225844 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-587f5f887c-jf8v5"]
Oct 06 13:57:25 crc kubenswrapper[4757]: I1006 13:57:25.323885 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-688cb45c44-gmn5j"]
Oct 06 13:57:25 crc kubenswrapper[4757]: I1006 13:57:25.582708 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fc489599-7rtht"]
Oct 06 13:57:25 crc kubenswrapper[4757]: I1006 13:57:25.584559 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d5595b-89h8z" event={"ID":"ac42434c-8367-4cf2-9134-2d85444f90f4","Type":"ContainerStarted","Data":"92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2"}
Oct 06 13:57:25 crc kubenswrapper[4757]: I1006 13:57:25.585748 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-587f5f887c-jf8v5" event={"ID":"0aa7bf0a-b5c3-4db6-b401-e8512e1df933","Type":"ContainerStarted","Data":"959c4bee82d55f9943c6527e6f86aa380d2506368a6929e96749089ca5928814"}
Oct 06 13:57:25 crc kubenswrapper[4757]: I1006 13:57:25.586522 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688cb45c44-gmn5j" event={"ID":"2c70918c-e944-47ef-8c0c-bbf476d75a77","Type":"ContainerStarted","Data":"05015caba8141658e81fb8c70494b1d49d21c8e3f807be41bf3c14a2a360d83d"}
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.196957 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cc03073-fe96-41a7-9b32-ad87247ed9b8" path="/var/lib/kubelet/pods/3cc03073-fe96-41a7-9b32-ad87247ed9b8/volumes"
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.601911 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-587f5f887c-jf8v5" event={"ID":"0aa7bf0a-b5c3-4db6-b401-e8512e1df933","Type":"ContainerStarted","Data":"bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598"}
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.602288 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-587f5f887c-jf8v5" event={"ID":"0aa7bf0a-b5c3-4db6-b401-e8512e1df933","Type":"ContainerStarted","Data":"a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf"}
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.603533 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-587f5f887c-jf8v5"
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.613166 4757 generic.go:334] "Generic (PLEG): container finished" podID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" containerID="17d7c1c3d9be3488cc60672b77690532199b118dcafd84380eeb6ca9c394edae" exitCode=0
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.613241 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fc489599-7rtht" event={"ID":"25ca3ce2-1e8e-4cab-b349-84fb82c52a56","Type":"ContainerDied","Data":"17d7c1c3d9be3488cc60672b77690532199b118dcafd84380eeb6ca9c394edae"}
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.613274 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fc489599-7rtht" event={"ID":"25ca3ce2-1e8e-4cab-b349-84fb82c52a56","Type":"ContainerStarted","Data":"602f41b412a152d3520d5d7a7a6e9e6e2c2ec9141c2bcae4cc67406984b0d9bd"}
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.627597 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688cb45c44-gmn5j" event={"ID":"2c70918c-e944-47ef-8c0c-bbf476d75a77","Type":"ContainerStarted","Data":"b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6"}
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.627649 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688cb45c44-gmn5j" event={"ID":"2c70918c-e944-47ef-8c0c-bbf476d75a77","Type":"ContainerStarted","Data":"25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053"}
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.628498 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-688cb45c44-gmn5j"
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.636312 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-587f5f887c-jf8v5" podStartSLOduration=5.636288433 podStartE2EDuration="5.636288433s" podCreationTimestamp="2025-10-06 13:57:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:26.630218817 +0000 UTC m=+1135.127537374" watchObservedRunningTime="2025-10-06 13:57:26.636288433 +0000 UTC m=+1135.133606990"
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.645591 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerStarted","Data":"bc51182febbef4179ff3692f7f6142306fb2459f60ae7e8f3f473d0497e1d207"}
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.645799 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="ceilometer-central-agent" containerID="cri-o://6e2deb40869d8e3b337a246d8a53afdfb67a72beae0813b065f38ab3aadcb0db" gracePeriod=30
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.646079 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.646149 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="proxy-httpd" containerID="cri-o://bc51182febbef4179ff3692f7f6142306fb2459f60ae7e8f3f473d0497e1d207" gracePeriod=30
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.646203 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="sg-core" containerID="cri-o://d50cf6da79f320cc02c7ebbf624e6b61f19b905001f2f9122545f8be6d6ea724" gracePeriod=30
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.646242 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="ceilometer-notification-agent" containerID="cri-o://8b9fd66a10d9bbb98b57c188dcf86b05a757eeddad190a593c9d44c2af604c7b" gracePeriod=30
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.652435 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d5595b-89h8z" event={"ID":"ac42434c-8367-4cf2-9134-2d85444f90f4","Type":"ContainerStarted","Data":"9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296"}
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.653696 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c78d5595b-89h8z"
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.653722 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c78d5595b-89h8z"
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.673439 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-688cb45c44-gmn5j" podStartSLOduration=7.673422501 podStartE2EDuration="7.673422501s" podCreationTimestamp="2025-10-06 13:57:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:26.666026614 +0000 UTC m=+1135.163345151" watchObservedRunningTime="2025-10-06 13:57:26.673422501 +0000 UTC m=+1135.170741038"
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.718326 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.564201476 podStartE2EDuration="43.718303256s" podCreationTimestamp="2025-10-06 13:56:43 +0000 UTC" firstStartedPulling="2025-10-06 13:56:44.977802435 +0000 UTC m=+1093.475120962" lastFinishedPulling="2025-10-06 13:57:25.131904205 +0000 UTC m=+1133.629222742" observedRunningTime="2025-10-06 13:57:26.709513027 +0000 UTC m=+1135.206831564" watchObservedRunningTime="2025-10-06 13:57:26.718303256 +0000 UTC m=+1135.215621793"
Oct 06 13:57:26 crc kubenswrapper[4757]: I1006 13:57:26.732886 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5c78d5595b-89h8z" podStartSLOduration=9.732865513 podStartE2EDuration="9.732865513s" podCreationTimestamp="2025-10-06 13:57:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:26.726422045 +0000 UTC m=+1135.223740592" watchObservedRunningTime="2025-10-06 13:57:26.732865513 +0000 UTC m=+1135.230184050"
Oct 06 13:57:27 crc kubenswrapper[4757]: I1006 13:57:27.664402 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fc489599-7rtht" event={"ID":"25ca3ce2-1e8e-4cab-b349-84fb82c52a56","Type":"ContainerStarted","Data":"14e4d0f68ad0a7a58e8f785958588888ef8b9418f7d9107506c3557ef11bf2d7"}
Oct 06 13:57:27 crc kubenswrapper[4757]: I1006 13:57:27.664758 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6fc489599-7rtht"
Oct 06 13:57:27 crc kubenswrapper[4757]: I1006 13:57:27.668023 4757 generic.go:334] "Generic (PLEG): container finished" podID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerID="bc51182febbef4179ff3692f7f6142306fb2459f60ae7e8f3f473d0497e1d207" exitCode=0
Oct 06 13:57:27 crc kubenswrapper[4757]: I1006 13:57:27.668072 4757 generic.go:334] "Generic (PLEG): container finished" podID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerID="d50cf6da79f320cc02c7ebbf624e6b61f19b905001f2f9122545f8be6d6ea724" exitCode=2
containerID="d50cf6da79f320cc02c7ebbf624e6b61f19b905001f2f9122545f8be6d6ea724" exitCode=2 Oct 06 13:57:27 crc kubenswrapper[4757]: I1006 13:57:27.668203 4757 generic.go:334] "Generic (PLEG): container finished" podID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerID="6e2deb40869d8e3b337a246d8a53afdfb67a72beae0813b065f38ab3aadcb0db" exitCode=0 Oct 06 13:57:27 crc kubenswrapper[4757]: I1006 13:57:27.668219 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerDied","Data":"bc51182febbef4179ff3692f7f6142306fb2459f60ae7e8f3f473d0497e1d207"} Oct 06 13:57:27 crc kubenswrapper[4757]: I1006 13:57:27.668365 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerDied","Data":"d50cf6da79f320cc02c7ebbf624e6b61f19b905001f2f9122545f8be6d6ea724"} Oct 06 13:57:27 crc kubenswrapper[4757]: I1006 13:57:27.668450 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerDied","Data":"6e2deb40869d8e3b337a246d8a53afdfb67a72beae0813b065f38ab3aadcb0db"} Oct 06 13:57:27 crc kubenswrapper[4757]: I1006 13:57:27.713046 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6fc489599-7rtht" podStartSLOduration=8.713020086 podStartE2EDuration="8.713020086s" podCreationTimestamp="2025-10-06 13:57:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:27.700903465 +0000 UTC m=+1136.198222012" watchObservedRunningTime="2025-10-06 13:57:27.713020086 +0000 UTC m=+1136.210338663" Oct 06 13:57:29 crc kubenswrapper[4757]: I1006 13:57:29.861217 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-ff5468974-c5722" Oct 06 13:57:31 crc kubenswrapper[4757]: I1006 13:57:31.711641 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4nqxd" event={"ID":"f705f558-519e-489e-8ff4-5b3eb4476eff","Type":"ContainerStarted","Data":"c64ab0d20fb2646123dfc9302dd89591647788afb3b670d1b19c581cda2b5162"} Oct 06 13:57:31 crc kubenswrapper[4757]: I1006 13:57:31.716754 4757 generic.go:334] "Generic (PLEG): container finished" podID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerID="8b9fd66a10d9bbb98b57c188dcf86b05a757eeddad190a593c9d44c2af604c7b" exitCode=0 Oct 06 13:57:31 crc kubenswrapper[4757]: I1006 13:57:31.716798 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerDied","Data":"8b9fd66a10d9bbb98b57c188dcf86b05a757eeddad190a593c9d44c2af604c7b"} Oct 06 13:57:31 crc kubenswrapper[4757]: I1006 13:57:31.729115 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-4nqxd" podStartSLOduration=2.642551452 podStartE2EDuration="39.729076776s" podCreationTimestamp="2025-10-06 13:56:52 +0000 UTC" firstStartedPulling="2025-10-06 13:56:53.693391318 +0000 UTC m=+1102.190709855" lastFinishedPulling="2025-10-06 13:57:30.779916602 +0000 UTC m=+1139.277235179" observedRunningTime="2025-10-06 13:57:31.727942731 +0000 UTC m=+1140.225261278" watchObservedRunningTime="2025-10-06 13:57:31.729076776 +0000 UTC m=+1140.226395313" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.045437 4757 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.081138 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-sg-core-conf-yaml\") pod \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.082337 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-scripts\") pod \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.082450 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-combined-ca-bundle\") pod \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.082513 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-run-httpd\") pod \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.082595 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-config-data\") pod \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.082650 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzr8l\" (UniqueName: \"kubernetes.io/projected/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-kube-api-access-qzr8l\") pod \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.082696 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-log-httpd\") pod \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\" (UID: \"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8\") " Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.082918 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" (UID: "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.083233 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" (UID: "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.083541 4757 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.083562 4757 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.103343 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-scripts" (OuterVolumeSpecName: "scripts") pod "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" (UID: "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.105714 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-kube-api-access-qzr8l" (OuterVolumeSpecName: "kube-api-access-qzr8l") pod "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" (UID: "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8"). InnerVolumeSpecName "kube-api-access-qzr8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.141909 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" (UID: "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.189018 4757 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.189423 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.189535 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzr8l\" (UniqueName: \"kubernetes.io/projected/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-kube-api-access-qzr8l\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.195228 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-config-data" (OuterVolumeSpecName: "config-data") pod "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" (UID: "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.196959 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" (UID: "fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.291203 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.291233 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.762339 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8","Type":"ContainerDied","Data":"ba168b87fe13f00fba56bd3b2154e79d489173b1616a451adbf1f5cc08595538"} Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.762667 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.762737 4757 scope.go:117] "RemoveContainer" containerID="bc51182febbef4179ff3692f7f6142306fb2459f60ae7e8f3f473d0497e1d207" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.792459 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.804751 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.822181 4757 scope.go:117] "RemoveContainer" containerID="d50cf6da79f320cc02c7ebbf624e6b61f19b905001f2f9122545f8be6d6ea724" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.823870 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:32 crc kubenswrapper[4757]: E1006 13:57:32.824285 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cc03073-fe96-41a7-9b32-ad87247ed9b8" containerName="init" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824305 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cc03073-fe96-41a7-9b32-ad87247ed9b8" containerName="init" Oct 06 13:57:32 crc kubenswrapper[4757]: E1006 13:57:32.824321 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cc03073-fe96-41a7-9b32-ad87247ed9b8" containerName="dnsmasq-dns" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824328 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cc03073-fe96-41a7-9b32-ad87247ed9b8" containerName="dnsmasq-dns" Oct 06 13:57:32 crc kubenswrapper[4757]: E1006 13:57:32.824350 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="sg-core" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824356 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="sg-core" Oct 06 13:57:32 crc kubenswrapper[4757]: E1006 13:57:32.824369 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="ceilometer-notification-agent" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824374 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="ceilometer-notification-agent" Oct 06 13:57:32 crc kubenswrapper[4757]: E1006 13:57:32.824387 4757 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="ceilometer-central-agent" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824393 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="ceilometer-central-agent" Oct 06 13:57:32 crc kubenswrapper[4757]: E1006 13:57:32.824408 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="proxy-httpd" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824414 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="proxy-httpd" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824568 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="ceilometer-central-agent" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824584 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cc03073-fe96-41a7-9b32-ad87247ed9b8" containerName="dnsmasq-dns" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824590 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="proxy-httpd" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824605 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="ceilometer-notification-agent" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.824615 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" containerName="sg-core" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.826173 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.828956 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.830107 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.837135 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.863282 4757 scope.go:117] "RemoveContainer" containerID="8b9fd66a10d9bbb98b57c188dcf86b05a757eeddad190a593c9d44c2af604c7b" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.908688 4757 scope.go:117] "RemoveContainer" containerID="6e2deb40869d8e3b337a246d8a53afdfb67a72beae0813b065f38ab3aadcb0db" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.911527 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-scripts\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.911646 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.911768 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-config-data\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.911826 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ml2jv\" (UniqueName: \"kubernetes.io/projected/13ea51c0-01eb-4d6c-b994-094b1fd1614e-kube-api-access-ml2jv\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.911946 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-run-httpd\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.911971 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0" Oct 06 13:57:32 crc kubenswrapper[4757]: I1006 13:57:32.912055 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-log-httpd\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0" Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.013561 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ml2jv\" (UniqueName: \"kubernetes.io/projected/13ea51c0-01eb-4d6c-b994-094b1fd1614e-kube-api-access-ml2jv\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.013647 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-run-httpd\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.013673 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.013728 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-log-httpd\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.013772 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-scripts\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.013812 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.015203 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-run-httpd\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.015272 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-log-httpd\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.021834 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.021847 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-scripts\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.022390 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.029273 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-config-data\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.034733 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ml2jv\" (UniqueName: \"kubernetes.io/projected/13ea51c0-01eb-4d6c-b994-094b1fd1614e-kube-api-access-ml2jv\") pod \"ceilometer-0\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.068351 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-678769d845-d782m"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.158827 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.696281 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:57:33 crc kubenswrapper[4757]: I1006 13:57:33.776263 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerStarted","Data":"ba124818bd2ae4c232680d063bf5a02dc7b4119b8f8d8e427c005238102d7814"}
Oct 06 13:57:34 crc kubenswrapper[4757]: I1006 13:57:34.189820 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8" path="/var/lib/kubelet/pods/fe8b5ba6-8cd2-4ca4-aad4-c2bb76da00d8/volumes"
Oct 06 13:57:34 crc kubenswrapper[4757]: I1006 13:57:34.361232 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 06 13:57:34 crc kubenswrapper[4757]: I1006 13:57:34.361289 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 06 13:57:34 crc kubenswrapper[4757]: I1006 13:57:34.742497 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c78d5595b-89h8z"
Oct 06 13:57:34 crc kubenswrapper[4757]: I1006 13:57:34.785285 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerStarted","Data":"0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83"}
Oct 06 13:57:34 crc kubenswrapper[4757]: I1006 13:57:34.863808 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c78d5595b-89h8z"
Oct 06 13:57:34 crc kubenswrapper[4757]: I1006 13:57:34.931614 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-646975b848-hkqzh"]
Oct 06 13:57:34 crc kubenswrapper[4757]: I1006 13:57:34.931877 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-646975b848-hkqzh" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api-log" containerID="cri-o://a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae" gracePeriod=30
Oct 06 13:57:34 crc kubenswrapper[4757]: I1006 13:57:34.932334 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-646975b848-hkqzh" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api" containerID="cri-o://c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572" gracePeriod=30
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.069354 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6fc489599-7rtht"
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.132435 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59bc97769f-w5b7p"]
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.133448 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p" podUID="8b1f051f-58c8-4226-b9f0-1104e5a262f5" containerName="dnsmasq-dns" containerID="cri-o://70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593" gracePeriod=10
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.588687 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.676140 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-svc\") pod \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") "
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.676246 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-config\") pod \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") "
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.676290 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-nb\") pod \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") "
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.676378 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbm4c\" (UniqueName: \"kubernetes.io/projected/8b1f051f-58c8-4226-b9f0-1104e5a262f5-kube-api-access-wbm4c\") pod \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") "
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.676463 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-swift-storage-0\") pod \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") "
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.676955 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-sb\") pod \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\" (UID: \"8b1f051f-58c8-4226-b9f0-1104e5a262f5\") "
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.696302 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b1f051f-58c8-4226-b9f0-1104e5a262f5-kube-api-access-wbm4c" (OuterVolumeSpecName: "kube-api-access-wbm4c") pod "8b1f051f-58c8-4226-b9f0-1104e5a262f5" (UID: "8b1f051f-58c8-4226-b9f0-1104e5a262f5"). InnerVolumeSpecName "kube-api-access-wbm4c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.740182 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8b1f051f-58c8-4226-b9f0-1104e5a262f5" (UID: "8b1f051f-58c8-4226-b9f0-1104e5a262f5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.743472 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8b1f051f-58c8-4226-b9f0-1104e5a262f5" (UID: "8b1f051f-58c8-4226-b9f0-1104e5a262f5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.760229 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-config" (OuterVolumeSpecName: "config") pod "8b1f051f-58c8-4226-b9f0-1104e5a262f5" (UID: "8b1f051f-58c8-4226-b9f0-1104e5a262f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.764192 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8b1f051f-58c8-4226-b9f0-1104e5a262f5" (UID: "8b1f051f-58c8-4226-b9f0-1104e5a262f5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.768464 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8b1f051f-58c8-4226-b9f0-1104e5a262f5" (UID: "8b1f051f-58c8-4226-b9f0-1104e5a262f5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.779407 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbm4c\" (UniqueName: \"kubernetes.io/projected/8b1f051f-58c8-4226-b9f0-1104e5a262f5-kube-api-access-wbm4c\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.779450 4757 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.779467 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.779479 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.779491 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-config\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.779504 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b1f051f-58c8-4226-b9f0-1104e5a262f5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.800406 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerStarted","Data":"d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817"}
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.802681 4757 generic.go:334] "Generic (PLEG): container finished" podID="8b1f051f-58c8-4226-b9f0-1104e5a262f5" containerID="70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593" exitCode=0
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.802712 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p" event={"ID":"8b1f051f-58c8-4226-b9f0-1104e5a262f5","Type":"ContainerDied","Data":"70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593"}
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.802745 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p" event={"ID":"8b1f051f-58c8-4226-b9f0-1104e5a262f5","Type":"ContainerDied","Data":"d716c8b4c7b9db010bd6376091af9c50b85f614b83df080929ccbc0b3b405a6a"}
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.802769 4757 scope.go:117] "RemoveContainer" containerID="70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593"
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.802696 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59bc97769f-w5b7p"
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.806466 4757 generic.go:334] "Generic (PLEG): container finished" podID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerID="a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae" exitCode=143
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.806515 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-646975b848-hkqzh" event={"ID":"fdb7d73e-b6bf-4276-8633-45746b12cc1b","Type":"ContainerDied","Data":"a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae"}
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.826255 4757 scope.go:117] "RemoveContainer" containerID="2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3"
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.858688 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59bc97769f-w5b7p"]
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.862246 4757 scope.go:117] "RemoveContainer" containerID="70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593"
Oct 06 13:57:35 crc kubenswrapper[4757]: E1006 13:57:35.864221 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593\": container with ID starting with 70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593 not found: ID does not exist" containerID="70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593"
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.864261 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593"} err="failed to get container status \"70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593\": rpc error: code = NotFound desc = could not find container \"70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593\": container with ID starting with 70f83406b7e66172c69fd259f8074b5cf0cb95201ffe093e5c6fe5770f82f593 not found: ID does not exist"
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.864289 4757 scope.go:117] "RemoveContainer" containerID="2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3"
Oct 06 13:57:35 crc kubenswrapper[4757]: E1006 13:57:35.867019 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3\": container with ID starting with 2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3 not found: ID does not exist" containerID="2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3"
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.867053 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3"} err="failed to get container status \"2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3\": rpc error: code = NotFound desc = could not find container \"2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3\": container with ID starting with 2fa7f14fca3ba9c8ec8766a4739c90ac5ac3483e23210bc51989494ad055e3f3 not found: ID does not exist"
Oct 06 13:57:35 crc kubenswrapper[4757]: I1006 13:57:35.871878 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59bc97769f-w5b7p"]
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.022573 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Oct 06 13:57:36 crc kubenswrapper[4757]: E1006 13:57:36.022932 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b1f051f-58c8-4226-b9f0-1104e5a262f5" containerName="dnsmasq-dns"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.022950 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b1f051f-58c8-4226-b9f0-1104e5a262f5" containerName="dnsmasq-dns"
Oct 06 13:57:36 crc kubenswrapper[4757]: E1006 13:57:36.022976 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b1f051f-58c8-4226-b9f0-1104e5a262f5" containerName="init"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.022983 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b1f051f-58c8-4226-b9f0-1104e5a262f5" containerName="init"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.023321 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b1f051f-58c8-4226-b9f0-1104e5a262f5" containerName="dnsmasq-dns"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.025480 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.027347 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.027542 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-z249g"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.027844 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.056038 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.083921 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.084252 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl8nl\" (UniqueName: \"kubernetes.io/projected/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-kube-api-access-tl8nl\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.084378 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config-secret\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.084517 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.185750 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.186027 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl8nl\" (UniqueName: \"kubernetes.io/projected/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-kube-api-access-tl8nl\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.186064 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config-secret\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.186122 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.187230 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.191262 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.191283 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b1f051f-58c8-4226-b9f0-1104e5a262f5" path="/var/lib/kubelet/pods/8b1f051f-58c8-4226-b9f0-1104e5a262f5/volumes"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.191745 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config-secret\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.205672 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl8nl\" (UniqueName: \"kubernetes.io/projected/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-kube-api-access-tl8nl\") pod \"openstackclient\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") " pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.305088 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.800707 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.816003 4757 generic.go:334] "Generic (PLEG): container finished" podID="f705f558-519e-489e-8ff4-5b3eb4476eff" containerID="c64ab0d20fb2646123dfc9302dd89591647788afb3b670d1b19c581cda2b5162" exitCode=0
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.816072 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4nqxd" event={"ID":"f705f558-519e-489e-8ff4-5b3eb4476eff","Type":"ContainerDied","Data":"c64ab0d20fb2646123dfc9302dd89591647788afb3b670d1b19c581cda2b5162"}
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.817317 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb","Type":"ContainerStarted","Data":"cfc5cb420ffa106196db991e4b148273b13ae5cc01ad88876d4fe19d6b91ba18"}
Oct 06 13:57:36 crc kubenswrapper[4757]: I1006 13:57:36.819255 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerStarted","Data":"a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8"}
Oct 06 13:57:37 crc kubenswrapper[4757]: I1006 13:57:37.836617 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerStarted","Data":"9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3"}
Oct 06 13:57:37 crc kubenswrapper[4757]: I1006 13:57:37.836987 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 06 13:57:37 crc kubenswrapper[4757]: I1006 13:57:37.867152 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.212858575 podStartE2EDuration="5.867135268s" podCreationTimestamp="2025-10-06 13:57:32 +0000 UTC" firstStartedPulling="2025-10-06 13:57:33.703139606 +0000 UTC m=+1142.200458133" lastFinishedPulling="2025-10-06 13:57:37.357416289 +0000 UTC m=+1145.854734826" observedRunningTime="2025-10-06 13:57:37.858733851 +0000 UTC m=+1146.356052388" watchObservedRunningTime="2025-10-06 13:57:37.867135268 +0000 UTC m=+1146.364453795"
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.118775 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-646975b848-hkqzh" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.156:9311/healthcheck\": read tcp 10.217.0.2:59286->10.217.0.156:9311: read: connection reset by peer"
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.118914 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-646975b848-hkqzh" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.156:9311/healthcheck\": read tcp 10.217.0.2:59298->10.217.0.156:9311: read: connection reset by peer"
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.266908 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-4nqxd"
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.331753 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-db-sync-config-data\") pod \"f705f558-519e-489e-8ff4-5b3eb4476eff\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") "
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.331910 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fj4s7\" (UniqueName: \"kubernetes.io/projected/f705f558-519e-489e-8ff4-5b3eb4476eff-kube-api-access-fj4s7\") pod \"f705f558-519e-489e-8ff4-5b3eb4476eff\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") "
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.331955 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-combined-ca-bundle\") pod \"f705f558-519e-489e-8ff4-5b3eb4476eff\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") "
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.331971 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f705f558-519e-489e-8ff4-5b3eb4476eff-etc-machine-id\") pod \"f705f558-519e-489e-8ff4-5b3eb4476eff\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") "
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.332015 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-config-data\") pod \"f705f558-519e-489e-8ff4-5b3eb4476eff\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") "
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.332112 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-scripts\") pod \"f705f558-519e-489e-8ff4-5b3eb4476eff\" (UID: \"f705f558-519e-489e-8ff4-5b3eb4476eff\") "
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.332133 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f705f558-519e-489e-8ff4-5b3eb4476eff-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f705f558-519e-489e-8ff4-5b3eb4476eff" (UID: "f705f558-519e-489e-8ff4-5b3eb4476eff"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.332466 4757 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f705f558-519e-489e-8ff4-5b3eb4476eff-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.338992 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-scripts" (OuterVolumeSpecName: "scripts") pod "f705f558-519e-489e-8ff4-5b3eb4476eff" (UID: "f705f558-519e-489e-8ff4-5b3eb4476eff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.340199 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f705f558-519e-489e-8ff4-5b3eb4476eff" (UID: "f705f558-519e-489e-8ff4-5b3eb4476eff"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.354493 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f705f558-519e-489e-8ff4-5b3eb4476eff-kube-api-access-fj4s7" (OuterVolumeSpecName: "kube-api-access-fj4s7") pod "f705f558-519e-489e-8ff4-5b3eb4476eff" (UID: "f705f558-519e-489e-8ff4-5b3eb4476eff"). InnerVolumeSpecName "kube-api-access-fj4s7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.399201 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-config-data" (OuterVolumeSpecName: "config-data") pod "f705f558-519e-489e-8ff4-5b3eb4476eff" (UID: "f705f558-519e-489e-8ff4-5b3eb4476eff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.399237 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f705f558-519e-489e-8ff4-5b3eb4476eff" (UID: "f705f558-519e-489e-8ff4-5b3eb4476eff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.434363 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fj4s7\" (UniqueName: \"kubernetes.io/projected/f705f558-519e-489e-8ff4-5b3eb4476eff-kube-api-access-fj4s7\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.434396 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.434407 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-config-data\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.434418 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.434429 4757 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f705f558-519e-489e-8ff4-5b3eb4476eff-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.470514 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-646975b848-hkqzh"
Need to start a new one" pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.535543 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdb7d73e-b6bf-4276-8633-45746b12cc1b-logs\") pod \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.535589 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data-custom\") pod \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.535738 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4knrl\" (UniqueName: \"kubernetes.io/projected/fdb7d73e-b6bf-4276-8633-45746b12cc1b-kube-api-access-4knrl\") pod \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.535761 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data\") pod \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.535828 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-combined-ca-bundle\") pod \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\" (UID: \"fdb7d73e-b6bf-4276-8633-45746b12cc1b\") " Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.536079 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdb7d73e-b6bf-4276-8633-45746b12cc1b-logs" (OuterVolumeSpecName: "logs") pod "fdb7d73e-b6bf-4276-8633-45746b12cc1b" (UID: "fdb7d73e-b6bf-4276-8633-45746b12cc1b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.536211 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fdb7d73e-b6bf-4276-8633-45746b12cc1b-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.539775 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdb7d73e-b6bf-4276-8633-45746b12cc1b-kube-api-access-4knrl" (OuterVolumeSpecName: "kube-api-access-4knrl") pod "fdb7d73e-b6bf-4276-8633-45746b12cc1b" (UID: "fdb7d73e-b6bf-4276-8633-45746b12cc1b"). InnerVolumeSpecName "kube-api-access-4knrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.539935 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fdb7d73e-b6bf-4276-8633-45746b12cc1b" (UID: "fdb7d73e-b6bf-4276-8633-45746b12cc1b"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.564675 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fdb7d73e-b6bf-4276-8633-45746b12cc1b" (UID: "fdb7d73e-b6bf-4276-8633-45746b12cc1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.605090 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data" (OuterVolumeSpecName: "config-data") pod "fdb7d73e-b6bf-4276-8633-45746b12cc1b" (UID: "fdb7d73e-b6bf-4276-8633-45746b12cc1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.637378 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.637414 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4knrl\" (UniqueName: \"kubernetes.io/projected/fdb7d73e-b6bf-4276-8633-45746b12cc1b-kube-api-access-4knrl\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.637426 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.637434 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdb7d73e-b6bf-4276-8633-45746b12cc1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.859834 4757 generic.go:334] "Generic (PLEG): container finished" podID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerID="c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572" exitCode=0 Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.859912 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-646975b848-hkqzh" event={"ID":"fdb7d73e-b6bf-4276-8633-45746b12cc1b","Type":"ContainerDied","Data":"c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572"} Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.859944 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-646975b848-hkqzh" event={"ID":"fdb7d73e-b6bf-4276-8633-45746b12cc1b","Type":"ContainerDied","Data":"f57026646b348655af0eb904c64c7f195c451c06c5a9de0d288719a738815953"} Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.860661 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-646975b848-hkqzh" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.860702 4757 scope.go:117] "RemoveContainer" containerID="c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.865596 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-4nqxd" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.867062 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4nqxd" event={"ID":"f705f558-519e-489e-8ff4-5b3eb4476eff","Type":"ContainerDied","Data":"5ebc87f9d0dd75bcaf02f76704f012782a0b2d7f879978c06b4b68ec0893c6f6"} Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.867112 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ebc87f9d0dd75bcaf02f76704f012782a0b2d7f879978c06b4b68ec0893c6f6" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.893734 4757 scope.go:117] "RemoveContainer" containerID="a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.911140 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-646975b848-hkqzh"] Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.913274 4757 scope.go:117] "RemoveContainer" containerID="c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572" Oct 06 13:57:38 crc kubenswrapper[4757]: E1006 13:57:38.914538 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572\": container with ID starting with c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572 not found: ID does not exist" containerID="c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.914575 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572"} err="failed to get container status \"c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572\": rpc error: code = NotFound desc = could not find container \"c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572\": container with ID starting with c0391f98b3cd9f27e73aab04396f1562344e2cb3a623f09fde302a4ff18f4572 not found: ID does not exist" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.914599 4757 scope.go:117] "RemoveContainer" containerID="a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae" Oct 06 13:57:38 crc kubenswrapper[4757]: E1006 13:57:38.915669 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae\": container with ID starting with a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae not found: ID does not exist" containerID="a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.915695 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae"} err="failed to get container status \"a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae\": rpc error: code = NotFound desc = could not find container \"a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae\": container with ID starting with a6499787e9bc87b23f95ff3ab051f4595b715c4dc4aa887df5be7685992688ae not found: ID does not exist" Oct 06 13:57:38 crc kubenswrapper[4757]: I1006 13:57:38.923774 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/barbican-api-646975b848-hkqzh"] Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.121587 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 13:57:39 crc kubenswrapper[4757]: E1006 13:57:39.126693 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.126719 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api" Oct 06 13:57:39 crc kubenswrapper[4757]: E1006 13:57:39.126749 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f705f558-519e-489e-8ff4-5b3eb4476eff" containerName="cinder-db-sync" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.126756 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f705f558-519e-489e-8ff4-5b3eb4476eff" containerName="cinder-db-sync" Oct 06 13:57:39 crc kubenswrapper[4757]: E1006 13:57:39.126780 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api-log" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.126787 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api-log" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.126955 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f705f558-519e-489e-8ff4-5b3eb4476eff" containerName="cinder-db-sync" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.126970 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api-log" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.126987 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" containerName="barbican-api" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.127942 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.131654 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-w5d5j" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.131818 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.131897 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.131991 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.140309 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.161012 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-scripts\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.161063 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdh8w\" (UniqueName: \"kubernetes.io/projected/3319038f-4b3b-47c4-880b-6dc356283248-kube-api-access-vdh8w\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.161134 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.161317 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.161372 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.161437 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3319038f-4b3b-47c4-880b-6dc356283248-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.207450 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67bc848c5c-dclqc"] Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.208909 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.221325 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bc848c5c-dclqc"] Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.262728 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-config\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.262798 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-svc\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.262867 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-sb\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.262916 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-nb\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.262939 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-scripts\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.262965 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdh8w\" (UniqueName: \"kubernetes.io/projected/3319038f-4b3b-47c4-880b-6dc356283248-kube-api-access-vdh8w\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.262993 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.263029 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.263051 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rqww\" (UniqueName: 
\"kubernetes.io/projected/2ede69c8-f24c-4bf1-82b6-d28d349fb686-kube-api-access-4rqww\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.263070 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.263117 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-swift-storage-0\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.263173 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3319038f-4b3b-47c4-880b-6dc356283248-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.263249 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3319038f-4b3b-47c4-880b-6dc356283248-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.272058 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-scripts\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.277596 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.288500 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdh8w\" (UniqueName: \"kubernetes.io/projected/3319038f-4b3b-47c4-880b-6dc356283248-kube-api-access-vdh8w\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.289194 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.299295 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc 
kubenswrapper[4757]: I1006 13:57:39.366610 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rqww\" (UniqueName: \"kubernetes.io/projected/2ede69c8-f24c-4bf1-82b6-d28d349fb686-kube-api-access-4rqww\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.366675 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-swift-storage-0\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.366736 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-config\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.366788 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-svc\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.366846 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-sb\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.366870 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-nb\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.367833 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-config\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.368398 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-svc\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.368509 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-swift-storage-0\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.370620 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-nb\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.374057 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-sb\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.389199 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rqww\" (UniqueName: \"kubernetes.io/projected/2ede69c8-f24c-4bf1-82b6-d28d349fb686-kube-api-access-4rqww\") pod \"dnsmasq-dns-67bc848c5c-dclqc\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") " pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.431212 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.433750 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.443568 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.446135 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.480783 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.480930 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a091d6c-d09a-4218-8175-e468a035f3b0-logs\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.481003 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.481163 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data-custom\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.481299 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a091d6c-d09a-4218-8175-e468a035f3b0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc 
kubenswrapper[4757]: I1006 13:57:39.481335 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4fx6\" (UniqueName: \"kubernetes.io/projected/1a091d6c-d09a-4218-8175-e468a035f3b0-kube-api-access-k4fx6\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.481473 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-scripts\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.500216 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.531193 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.588153 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.588203 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data-custom\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.588271 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a091d6c-d09a-4218-8175-e468a035f3b0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.588303 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4fx6\" (UniqueName: \"kubernetes.io/projected/1a091d6c-d09a-4218-8175-e468a035f3b0-kube-api-access-k4fx6\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.588348 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-scripts\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.588375 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.588394 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a091d6c-d09a-4218-8175-e468a035f3b0-logs\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " 
pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.595644 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a091d6c-d09a-4218-8175-e468a035f3b0-logs\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.595731 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a091d6c-d09a-4218-8175-e468a035f3b0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.605991 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.614159 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data-custom\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.620564 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-scripts\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.626185 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.628807 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4fx6\" (UniqueName: \"kubernetes.io/projected/1a091d6c-d09a-4218-8175-e468a035f3b0-kube-api-access-k4fx6\") pod \"cinder-api-0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " pod="openstack/cinder-api-0" Oct 06 13:57:39 crc kubenswrapper[4757]: I1006 13:57:39.784664 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.013017 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bc848c5c-dclqc"] Oct 06 13:57:40 crc kubenswrapper[4757]: W1006 13:57:40.033870 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ede69c8_f24c_4bf1_82b6_d28d349fb686.slice/crio-1cdd35eb62cc858b6e0b2b4177ecad7b12e848b20099d8e27623696710521149 WatchSource:0}: Error finding container 1cdd35eb62cc858b6e0b2b4177ecad7b12e848b20099d8e27623696710521149: Status 404 returned error can't find the container with id 1cdd35eb62cc858b6e0b2b4177ecad7b12e848b20099d8e27623696710521149 Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.096240 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 13:57:40 crc kubenswrapper[4757]: W1006 13:57:40.103681 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3319038f_4b3b_47c4_880b_6dc356283248.slice/crio-c0560fe1b4a8dd964b5909bcd448b2158908daddd2d0a9a481be7e62032e6360 WatchSource:0}: Error finding container c0560fe1b4a8dd964b5909bcd448b2158908daddd2d0a9a481be7e62032e6360: Status 404 returned error can't find the container with id c0560fe1b4a8dd964b5909bcd448b2158908daddd2d0a9a481be7e62032e6360 Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.190574 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdb7d73e-b6bf-4276-8633-45746b12cc1b" path="/var/lib/kubelet/pods/fdb7d73e-b6bf-4276-8633-45746b12cc1b/volumes" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.280037 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 06 13:57:40 crc kubenswrapper[4757]: W1006 13:57:40.290853 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a091d6c_d09a_4218_8175_e468a035f3b0.slice/crio-de9a0df6214d3e9ecb19be3f2062286920b885fddd1e73be4547bb678dd4f6d3 WatchSource:0}: Error finding container de9a0df6214d3e9ecb19be3f2062286920b885fddd1e73be4547bb678dd4f6d3: Status 404 returned error can't find the container with id de9a0df6214d3e9ecb19be3f2062286920b885fddd1e73be4547bb678dd4f6d3 Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.562431 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-7c7848899c-8bm7g"] Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.564170 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.567937 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.568166 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.568314 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.579392 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7c7848899c-8bm7g"] Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.709894 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-config-data\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.709959 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-log-httpd\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.710112 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkv59\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-kube-api-access-lkv59\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.710183 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-run-httpd\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.710231 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-public-tls-certs\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.710296 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-etc-swift\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.710324 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-combined-ca-bundle\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " 
pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.710450 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-internal-tls-certs\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.811859 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-etc-swift\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.811902 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-combined-ca-bundle\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.811937 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-internal-tls-certs\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.811965 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-config-data\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.811994 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-log-httpd\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.812059 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkv59\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-kube-api-access-lkv59\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.812111 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-run-httpd\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.812144 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-public-tls-certs\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " 
pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.815239 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-log-httpd\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.815569 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-run-httpd\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.818831 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-combined-ca-bundle\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.824848 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-config-data\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.828732 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-internal-tls-certs\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.838941 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-etc-swift\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.839967 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-public-tls-certs\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.843009 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkv59\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-kube-api-access-lkv59\") pod \"swift-proxy-7c7848899c-8bm7g\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.889812 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.897120 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3319038f-4b3b-47c4-880b-6dc356283248","Type":"ContainerStarted","Data":"c0560fe1b4a8dd964b5909bcd448b2158908daddd2d0a9a481be7e62032e6360"} Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.899647 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a091d6c-d09a-4218-8175-e468a035f3b0","Type":"ContainerStarted","Data":"de9a0df6214d3e9ecb19be3f2062286920b885fddd1e73be4547bb678dd4f6d3"} Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.904045 4757 generic.go:334] "Generic (PLEG): container finished" podID="2ede69c8-f24c-4bf1-82b6-d28d349fb686" containerID="578b93cc240dbacbd80fe044625bbae34469aef7e7fb41649ffd6cb020e3d8ef" exitCode=0 Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.904176 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" event={"ID":"2ede69c8-f24c-4bf1-82b6-d28d349fb686","Type":"ContainerDied","Data":"578b93cc240dbacbd80fe044625bbae34469aef7e7fb41649ffd6cb020e3d8ef"} Oct 06 13:57:40 crc kubenswrapper[4757]: I1006 13:57:40.904208 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" event={"ID":"2ede69c8-f24c-4bf1-82b6-d28d349fb686","Type":"ContainerStarted","Data":"1cdd35eb62cc858b6e0b2b4177ecad7b12e848b20099d8e27623696710521149"} Oct 06 13:57:41 crc kubenswrapper[4757]: I1006 13:57:41.690354 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 06 13:57:41 crc kubenswrapper[4757]: I1006 13:57:41.782448 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7c7848899c-8bm7g"] Oct 06 13:57:41 crc kubenswrapper[4757]: I1006 13:57:41.924826 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a091d6c-d09a-4218-8175-e468a035f3b0","Type":"ContainerStarted","Data":"6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917"} Oct 06 13:57:41 crc kubenswrapper[4757]: I1006 13:57:41.927270 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" event={"ID":"2ede69c8-f24c-4bf1-82b6-d28d349fb686","Type":"ContainerStarted","Data":"b98bcddeb47dcac8014d7c6dfc9cc3eca9ab1c8761ddfa47739a393cb7f1ea2b"} Oct 06 13:57:41 crc kubenswrapper[4757]: I1006 13:57:41.928570 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:41 crc kubenswrapper[4757]: I1006 13:57:41.932865 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7c7848899c-8bm7g" event={"ID":"ec6e1479-3b96-4c4b-be95-5834172d37ff","Type":"ContainerStarted","Data":"e883a3dae22fdf60514c219a9283e92bb5abf320e9d17268731d006765c50594"} Oct 06 13:57:41 crc kubenswrapper[4757]: I1006 13:57:41.946020 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" podStartSLOduration=2.946001662 podStartE2EDuration="2.946001662s" podCreationTimestamp="2025-10-06 13:57:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:41.943991441 +0000 UTC m=+1150.441309998" watchObservedRunningTime="2025-10-06 13:57:41.946001662 +0000 UTC m=+1150.443320199" Oct 06 
13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.948477 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerName="cinder-api-log" containerID="cri-o://6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917" gracePeriod=30 Oct 06 13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.949248 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a091d6c-d09a-4218-8175-e468a035f3b0","Type":"ContainerStarted","Data":"a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2"} Oct 06 13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.949292 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 06 13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.949504 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerName="cinder-api" containerID="cri-o://a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2" gracePeriod=30 Oct 06 13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.960914 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3319038f-4b3b-47c4-880b-6dc356283248","Type":"ContainerStarted","Data":"2fed91e04d37e588ad11a9573cbf6d5f7e04c37e956234e1c31609ed168f2e2e"} Oct 06 13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.960966 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3319038f-4b3b-47c4-880b-6dc356283248","Type":"ContainerStarted","Data":"8da7ce44e013c3489aeab6813581419f2abbece874c1d0f222bcbf9a26111934"} Oct 06 13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.975245 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7c7848899c-8bm7g" event={"ID":"ec6e1479-3b96-4c4b-be95-5834172d37ff","Type":"ContainerStarted","Data":"14022a75e2f882a054a2a8082d11133582e6585d36ef89867d94fb837bf54886"} Oct 06 13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.975325 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7c7848899c-8bm7g" event={"ID":"ec6e1479-3b96-4c4b-be95-5834172d37ff","Type":"ContainerStarted","Data":"ba1f45ef3932bf918c1b59fa1ed6629efa95b65ac36b643ce4bf6f517905a218"} Oct 06 13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.976316 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:42 crc kubenswrapper[4757]: I1006 13:57:42.976387 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.005421 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.005392385 podStartE2EDuration="4.005392385s" podCreationTimestamp="2025-10-06 13:57:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:42.982463052 +0000 UTC m=+1151.479781579" watchObservedRunningTime="2025-10-06 13:57:43.005392385 +0000 UTC m=+1151.502710932" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.029282 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.788301671 podStartE2EDuration="4.029265946s" 
podCreationTimestamp="2025-10-06 13:57:39 +0000 UTC" firstStartedPulling="2025-10-06 13:57:40.106425775 +0000 UTC m=+1148.603744302" lastFinishedPulling="2025-10-06 13:57:41.34739004 +0000 UTC m=+1149.844708577" observedRunningTime="2025-10-06 13:57:43.027045528 +0000 UTC m=+1151.524364085" watchObservedRunningTime="2025-10-06 13:57:43.029265946 +0000 UTC m=+1151.526584473" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.041430 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-7c7848899c-8bm7g" podStartSLOduration=3.041407428 podStartE2EDuration="3.041407428s" podCreationTimestamp="2025-10-06 13:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:43.008588773 +0000 UTC m=+1151.505907320" watchObservedRunningTime="2025-10-06 13:57:43.041407428 +0000 UTC m=+1151.538725975" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.649545 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.723656 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data\") pod \"1a091d6c-d09a-4218-8175-e468a035f3b0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.723760 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-combined-ca-bundle\") pod \"1a091d6c-d09a-4218-8175-e468a035f3b0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.723793 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a091d6c-d09a-4218-8175-e468a035f3b0-logs\") pod \"1a091d6c-d09a-4218-8175-e468a035f3b0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.724032 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4fx6\" (UniqueName: \"kubernetes.io/projected/1a091d6c-d09a-4218-8175-e468a035f3b0-kube-api-access-k4fx6\") pod \"1a091d6c-d09a-4218-8175-e468a035f3b0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.724102 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-scripts\") pod \"1a091d6c-d09a-4218-8175-e468a035f3b0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.724151 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data-custom\") pod \"1a091d6c-d09a-4218-8175-e468a035f3b0\" (UID: \"1a091d6c-d09a-4218-8175-e468a035f3b0\") " Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.724218 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a091d6c-d09a-4218-8175-e468a035f3b0-etc-machine-id\") pod \"1a091d6c-d09a-4218-8175-e468a035f3b0\" (UID: 
\"1a091d6c-d09a-4218-8175-e468a035f3b0\") " Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.724675 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a091d6c-d09a-4218-8175-e468a035f3b0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1a091d6c-d09a-4218-8175-e468a035f3b0" (UID: "1a091d6c-d09a-4218-8175-e468a035f3b0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.730536 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a091d6c-d09a-4218-8175-e468a035f3b0-logs" (OuterVolumeSpecName: "logs") pod "1a091d6c-d09a-4218-8175-e468a035f3b0" (UID: "1a091d6c-d09a-4218-8175-e468a035f3b0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.739076 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a091d6c-d09a-4218-8175-e468a035f3b0-kube-api-access-k4fx6" (OuterVolumeSpecName: "kube-api-access-k4fx6") pod "1a091d6c-d09a-4218-8175-e468a035f3b0" (UID: "1a091d6c-d09a-4218-8175-e468a035f3b0"). InnerVolumeSpecName "kube-api-access-k4fx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.751700 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-scripts" (OuterVolumeSpecName: "scripts") pod "1a091d6c-d09a-4218-8175-e468a035f3b0" (UID: "1a091d6c-d09a-4218-8175-e468a035f3b0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.765788 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1a091d6c-d09a-4218-8175-e468a035f3b0" (UID: "1a091d6c-d09a-4218-8175-e468a035f3b0"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.806399 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.807299 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="ceilometer-central-agent" containerID="cri-o://0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83" gracePeriod=30 Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.808829 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="proxy-httpd" containerID="cri-o://9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3" gracePeriod=30 Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.809067 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="sg-core" containerID="cri-o://a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8" gracePeriod=30 Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.809152 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="ceilometer-notification-agent" containerID="cri-o://d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817" gracePeriod=30 Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.830307 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a091d6c-d09a-4218-8175-e468a035f3b0-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.830340 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4fx6\" (UniqueName: \"kubernetes.io/projected/1a091d6c-d09a-4218-8175-e468a035f3b0-kube-api-access-k4fx6\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.830350 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.830358 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.830366 4757 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1a091d6c-d09a-4218-8175-e468a035f3b0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.866705 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a091d6c-d09a-4218-8175-e468a035f3b0" (UID: "1a091d6c-d09a-4218-8175-e468a035f3b0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.899199 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data" (OuterVolumeSpecName: "config-data") pod "1a091d6c-d09a-4218-8175-e468a035f3b0" (UID: "1a091d6c-d09a-4218-8175-e468a035f3b0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.931931 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.931975 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a091d6c-d09a-4218-8175-e468a035f3b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.984990 4757 generic.go:334] "Generic (PLEG): container finished" podID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerID="9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3" exitCode=0 Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.985031 4757 generic.go:334] "Generic (PLEG): container finished" podID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerID="a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8" exitCode=2 Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.985076 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerDied","Data":"9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3"} Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.985130 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerDied","Data":"a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8"} Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.989146 4757 generic.go:334] "Generic (PLEG): container finished" podID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerID="a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2" exitCode=0 Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.989177 4757 generic.go:334] "Generic (PLEG): container finished" podID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerID="6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917" exitCode=143 Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.989981 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.996179 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a091d6c-d09a-4218-8175-e468a035f3b0","Type":"ContainerDied","Data":"a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2"} Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.996250 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a091d6c-d09a-4218-8175-e468a035f3b0","Type":"ContainerDied","Data":"6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917"} Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.996263 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1a091d6c-d09a-4218-8175-e468a035f3b0","Type":"ContainerDied","Data":"de9a0df6214d3e9ecb19be3f2062286920b885fddd1e73be4547bb678dd4f6d3"} Oct 06 13:57:43 crc kubenswrapper[4757]: I1006 13:57:43.996280 4757 scope.go:117] "RemoveContainer" containerID="a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.026561 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.037541 4757 scope.go:117] "RemoveContainer" containerID="6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.052470 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.066274 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 06 13:57:44 crc kubenswrapper[4757]: E1006 13:57:44.066715 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerName="cinder-api-log" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.066741 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerName="cinder-api-log" Oct 06 13:57:44 crc kubenswrapper[4757]: E1006 13:57:44.066766 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerName="cinder-api" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.066777 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerName="cinder-api" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.067050 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerName="cinder-api-log" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.067107 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a091d6c-d09a-4218-8175-e468a035f3b0" containerName="cinder-api" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.068250 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.073470 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.073725 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.073833 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.080172 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.113105 4757 scope.go:117] "RemoveContainer" containerID="a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2" Oct 06 13:57:44 crc kubenswrapper[4757]: E1006 13:57:44.117176 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2\": container with ID starting with a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2 not found: ID does not exist" containerID="a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.117209 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2"} err="failed to get container status \"a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2\": rpc error: code = NotFound desc = could not find container \"a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2\": container with ID starting with a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2 not found: ID does not exist" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.117230 4757 scope.go:117] "RemoveContainer" containerID="6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917" Oct 06 13:57:44 crc kubenswrapper[4757]: E1006 13:57:44.117457 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917\": container with ID starting with 6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917 not found: ID does not exist" containerID="6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.117475 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917"} err="failed to get container status \"6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917\": rpc error: code = NotFound desc = could not find container \"6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917\": container with ID starting with 6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917 not found: ID does not exist" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.117489 4757 scope.go:117] "RemoveContainer" containerID="a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.117705 4757 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2"} err="failed to get container status \"a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2\": rpc error: code = NotFound desc = could not find container \"a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2\": container with ID starting with a7824b4a479a46011e9adeedf308d2c26333410a44be3979147c3ea06b8a14b2 not found: ID does not exist" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.117722 4757 scope.go:117] "RemoveContainer" containerID="6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.118160 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917"} err="failed to get container status \"6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917\": rpc error: code = NotFound desc = could not find container \"6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917\": container with ID starting with 6d0ff4b524eee02a4885aa0283a6eb43fe1edee7e5122f952f1bd5d6c366c917 not found: ID does not exist" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.135359 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.135517 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a274a347-259a-4919-8326-8047df9b0de8-logs\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.135604 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.135621 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f4hk\" (UniqueName: \"kubernetes.io/projected/a274a347-259a-4919-8326-8047df9b0de8-kube-api-access-8f4hk\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.135647 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.135669 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data-custom\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.135719 4757 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a274a347-259a-4919-8326-8047df9b0de8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.135735 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-scripts\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.135784 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.193465 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a091d6c-d09a-4218-8175-e468a035f3b0" path="/var/lib/kubelet/pods/1a091d6c-d09a-4218-8175-e468a035f3b0/volumes" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.237380 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.237485 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a274a347-259a-4919-8326-8047df9b0de8-logs\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.237550 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.237570 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f4hk\" (UniqueName: \"kubernetes.io/projected/a274a347-259a-4919-8326-8047df9b0de8-kube-api-access-8f4hk\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.237603 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.237633 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data-custom\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.237691 4757 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a274a347-259a-4919-8326-8047df9b0de8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.237712 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-scripts\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.237744 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.238220 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a274a347-259a-4919-8326-8047df9b0de8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.238589 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a274a347-259a-4919-8326-8047df9b0de8-logs\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.243420 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data-custom\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.245261 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-scripts\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.246325 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.247885 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.249179 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.255983 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.263590 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f4hk\" (UniqueName: \"kubernetes.io/projected/a274a347-259a-4919-8326-8047df9b0de8-kube-api-access-8f4hk\") pod \"cinder-api-0\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.414051 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.501154 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.733970 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.817012 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.853241 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-run-httpd\") pod \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.853312 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-log-httpd\") pod \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.853394 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-scripts\") pod \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.853501 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ml2jv\" (UniqueName: \"kubernetes.io/projected/13ea51c0-01eb-4d6c-b994-094b1fd1614e-kube-api-access-ml2jv\") pod \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.853529 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-sg-core-conf-yaml\") pod \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.853631 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-config-data\") pod \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.853738 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-combined-ca-bundle\") pod \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\" (UID: \"13ea51c0-01eb-4d6c-b994-094b1fd1614e\") " Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.853792 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "13ea51c0-01eb-4d6c-b994-094b1fd1614e" (UID: "13ea51c0-01eb-4d6c-b994-094b1fd1614e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.853872 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "13ea51c0-01eb-4d6c-b994-094b1fd1614e" (UID: "13ea51c0-01eb-4d6c-b994-094b1fd1614e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.854288 4757 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.854318 4757 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/13ea51c0-01eb-4d6c-b994-094b1fd1614e-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.860037 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13ea51c0-01eb-4d6c-b994-094b1fd1614e-kube-api-access-ml2jv" (OuterVolumeSpecName: "kube-api-access-ml2jv") pod "13ea51c0-01eb-4d6c-b994-094b1fd1614e" (UID: "13ea51c0-01eb-4d6c-b994-094b1fd1614e"). InnerVolumeSpecName "kube-api-access-ml2jv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.864995 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-scripts" (OuterVolumeSpecName: "scripts") pod "13ea51c0-01eb-4d6c-b994-094b1fd1614e" (UID: "13ea51c0-01eb-4d6c-b994-094b1fd1614e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.899286 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "13ea51c0-01eb-4d6c-b994-094b1fd1614e" (UID: "13ea51c0-01eb-4d6c-b994-094b1fd1614e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.935454 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13ea51c0-01eb-4d6c-b994-094b1fd1614e" (UID: "13ea51c0-01eb-4d6c-b994-094b1fd1614e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.955870 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.955918 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.955931 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ml2jv\" (UniqueName: \"kubernetes.io/projected/13ea51c0-01eb-4d6c-b994-094b1fd1614e-kube-api-access-ml2jv\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.955947 4757 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:44 crc kubenswrapper[4757]: I1006 13:57:44.992026 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-config-data" (OuterVolumeSpecName: "config-data") pod "13ea51c0-01eb-4d6c-b994-094b1fd1614e" (UID: "13ea51c0-01eb-4d6c-b994-094b1fd1614e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.009998 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a274a347-259a-4919-8326-8047df9b0de8","Type":"ContainerStarted","Data":"b3b1c2ceaa596a855a2c3c2e0bd1b47ddd880cfeea2a9ddb6672bde046d8748b"} Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.014180 4757 generic.go:334] "Generic (PLEG): container finished" podID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerID="d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817" exitCode=0 Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.014205 4757 generic.go:334] "Generic (PLEG): container finished" podID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerID="0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83" exitCode=0 Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.015213 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.020314 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerDied","Data":"d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817"} Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.020677 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerDied","Data":"0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83"} Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.020707 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"13ea51c0-01eb-4d6c-b994-094b1fd1614e","Type":"ContainerDied","Data":"ba124818bd2ae4c232680d063bf5a02dc7b4119b8f8d8e427c005238102d7814"} Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.020729 4757 scope.go:117] "RemoveContainer" containerID="9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.071468 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13ea51c0-01eb-4d6c-b994-094b1fd1614e-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.074467 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.089865 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.111523 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:45 crc kubenswrapper[4757]: E1006 13:57:45.111841 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="sg-core" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.111857 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="sg-core" Oct 06 13:57:45 crc kubenswrapper[4757]: E1006 13:57:45.111878 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="ceilometer-notification-agent" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.111885 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="ceilometer-notification-agent" Oct 06 13:57:45 crc kubenswrapper[4757]: E1006 13:57:45.111898 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="proxy-httpd" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.111907 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="proxy-httpd" Oct 06 13:57:45 crc kubenswrapper[4757]: E1006 13:57:45.111919 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="ceilometer-central-agent" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.111927 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="ceilometer-central-agent" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.112089 4757 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="ceilometer-central-agent" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.112130 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="proxy-httpd" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.112140 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="ceilometer-notification-agent" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.112159 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" containerName="sg-core" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.113814 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.116560 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.116762 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.118478 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.275462 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.275512 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-log-httpd\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.275579 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.275826 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-scripts\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.276528 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mstz4\" (UniqueName: \"kubernetes.io/projected/35e902eb-73d7-4f08-b184-3b4804d7b483-kube-api-access-mstz4\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.276737 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-run-httpd\") pod \"ceilometer-0\" (UID: 
\"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.278791 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-config-data\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.380874 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.381584 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-scripts\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.381652 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mstz4\" (UniqueName: \"kubernetes.io/projected/35e902eb-73d7-4f08-b184-3b4804d7b483-kube-api-access-mstz4\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.381690 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-run-httpd\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.381716 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-config-data\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.381737 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.381764 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-log-httpd\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.382068 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-log-httpd\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.382441 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-run-httpd\") pod \"ceilometer-0\" (UID: 
\"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.386758 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-config-data\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.388620 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-scripts\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.396452 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.396627 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.398023 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mstz4\" (UniqueName: \"kubernetes.io/projected/35e902eb-73d7-4f08-b184-3b4804d7b483-kube-api-access-mstz4\") pod \"ceilometer-0\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " pod="openstack/ceilometer-0" Oct 06 13:57:45 crc kubenswrapper[4757]: I1006 13:57:45.433930 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:57:46 crc kubenswrapper[4757]: I1006 13:57:46.031379 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a274a347-259a-4919-8326-8047df9b0de8","Type":"ContainerStarted","Data":"56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53"} Oct 06 13:57:46 crc kubenswrapper[4757]: I1006 13:57:46.200581 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13ea51c0-01eb-4d6c-b994-094b1fd1614e" path="/var/lib/kubelet/pods/13ea51c0-01eb-4d6c-b994-094b1fd1614e/volumes" Oct 06 13:57:47 crc kubenswrapper[4757]: I1006 13:57:47.646740 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:49 crc kubenswrapper[4757]: I1006 13:57:49.537633 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" Oct 06 13:57:49 crc kubenswrapper[4757]: I1006 13:57:49.596690 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fc489599-7rtht"] Oct 06 13:57:49 crc kubenswrapper[4757]: I1006 13:57:49.596916 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6fc489599-7rtht" podUID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" containerName="dnsmasq-dns" containerID="cri-o://14e4d0f68ad0a7a58e8f785958588888ef8b9418f7d9107506c3557ef11bf2d7" gracePeriod=10 Oct 06 13:57:49 crc kubenswrapper[4757]: I1006 13:57:49.935556 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 06 13:57:49 crc kubenswrapper[4757]: I1006 13:57:49.985968 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.068512 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6fc489599-7rtht" podUID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.158:5353: connect: connection refused" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.074907 4757 generic.go:334] "Generic (PLEG): container finished" podID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" containerID="14e4d0f68ad0a7a58e8f785958588888ef8b9418f7d9107506c3557ef11bf2d7" exitCode=0 Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.075085 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="3319038f-4b3b-47c4-880b-6dc356283248" containerName="cinder-scheduler" containerID="cri-o://8da7ce44e013c3489aeab6813581419f2abbece874c1d0f222bcbf9a26111934" gracePeriod=30 Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.075305 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fc489599-7rtht" event={"ID":"25ca3ce2-1e8e-4cab-b349-84fb82c52a56","Type":"ContainerDied","Data":"14e4d0f68ad0a7a58e8f785958588888ef8b9418f7d9107506c3557ef11bf2d7"} Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.075535 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="3319038f-4b3b-47c4-880b-6dc356283248" containerName="probe" containerID="cri-o://2fed91e04d37e588ad11a9573cbf6d5f7e04c37e956234e1c31609ed168f2e2e" gracePeriod=30 Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.215496 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 
13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.708484 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-8f2b9"] Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.711113 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8f2b9" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.760563 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8f2b9"] Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.801934 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nw6h\" (UniqueName: \"kubernetes.io/projected/fbc7bc9f-cc73-4943-94fd-c7288e7efb52-kube-api-access-4nw6h\") pod \"nova-api-db-create-8f2b9\" (UID: \"fbc7bc9f-cc73-4943-94fd-c7288e7efb52\") " pod="openstack/nova-api-db-create-8f2b9" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.803524 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-6hgd2"] Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.804601 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-6hgd2" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.813544 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-6hgd2"] Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.904699 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nw6h\" (UniqueName: \"kubernetes.io/projected/fbc7bc9f-cc73-4943-94fd-c7288e7efb52-kube-api-access-4nw6h\") pod \"nova-api-db-create-8f2b9\" (UID: \"fbc7bc9f-cc73-4943-94fd-c7288e7efb52\") " pod="openstack/nova-api-db-create-8f2b9" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.904836 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j59w7\" (UniqueName: \"kubernetes.io/projected/d2b1e842-4aee-4ab7-97ad-6407e2d6834d-kube-api-access-j59w7\") pod \"nova-cell0-db-create-6hgd2\" (UID: \"d2b1e842-4aee-4ab7-97ad-6407e2d6834d\") " pod="openstack/nova-cell0-db-create-6hgd2" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.917804 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-7ql5g"] Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.919224 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-7ql5g" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.922371 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.924300 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.934087 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nw6h\" (UniqueName: \"kubernetes.io/projected/fbc7bc9f-cc73-4943-94fd-c7288e7efb52-kube-api-access-4nw6h\") pod \"nova-api-db-create-8f2b9\" (UID: \"fbc7bc9f-cc73-4943-94fd-c7288e7efb52\") " pod="openstack/nova-api-db-create-8f2b9" Oct 06 13:57:50 crc kubenswrapper[4757]: I1006 13:57:50.974261 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-7ql5g"] Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.006676 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j59w7\" (UniqueName: \"kubernetes.io/projected/d2b1e842-4aee-4ab7-97ad-6407e2d6834d-kube-api-access-j59w7\") pod \"nova-cell0-db-create-6hgd2\" (UID: \"d2b1e842-4aee-4ab7-97ad-6407e2d6834d\") " pod="openstack/nova-cell0-db-create-6hgd2" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.006734 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/fe9e11b6-6610-4dc3-a5fc-94142df40ee3-kube-api-access-lkhf8\") pod \"nova-cell1-db-create-7ql5g\" (UID: \"fe9e11b6-6610-4dc3-a5fc-94142df40ee3\") " pod="openstack/nova-cell1-db-create-7ql5g" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.030804 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8f2b9" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.033898 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j59w7\" (UniqueName: \"kubernetes.io/projected/d2b1e842-4aee-4ab7-97ad-6407e2d6834d-kube-api-access-j59w7\") pod \"nova-cell0-db-create-6hgd2\" (UID: \"d2b1e842-4aee-4ab7-97ad-6407e2d6834d\") " pod="openstack/nova-cell0-db-create-6hgd2" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.090366 4757 generic.go:334] "Generic (PLEG): container finished" podID="3319038f-4b3b-47c4-880b-6dc356283248" containerID="2fed91e04d37e588ad11a9573cbf6d5f7e04c37e956234e1c31609ed168f2e2e" exitCode=0 Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.091222 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3319038f-4b3b-47c4-880b-6dc356283248","Type":"ContainerDied","Data":"2fed91e04d37e588ad11a9573cbf6d5f7e04c37e956234e1c31609ed168f2e2e"} Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.108048 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/fe9e11b6-6610-4dc3-a5fc-94142df40ee3-kube-api-access-lkhf8\") pod \"nova-cell1-db-create-7ql5g\" (UID: \"fe9e11b6-6610-4dc3-a5fc-94142df40ee3\") " pod="openstack/nova-cell1-db-create-7ql5g" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.131622 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-6hgd2" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.138204 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.138458 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-log" containerID="cri-o://a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b" gracePeriod=30 Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.138855 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-httpd" containerID="cri-o://570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289" gracePeriod=30 Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.152252 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/fe9e11b6-6610-4dc3-a5fc-94142df40ee3-kube-api-access-lkhf8\") pod \"nova-cell1-db-create-7ql5g\" (UID: \"fe9e11b6-6610-4dc3-a5fc-94142df40ee3\") " pod="openstack/nova-cell1-db-create-7ql5g" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.260491 4757 scope.go:117] "RemoveContainer" containerID="a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.298328 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7ql5g" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.404452 4757 scope.go:117] "RemoveContainer" containerID="d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.562299 4757 scope.go:117] "RemoveContainer" containerID="0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.616955 4757 scope.go:117] "RemoveContainer" containerID="9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3" Oct 06 13:57:51 crc kubenswrapper[4757]: E1006 13:57:51.618657 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3\": container with ID starting with 9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3 not found: ID does not exist" containerID="9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.618688 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3"} err="failed to get container status \"9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3\": rpc error: code = NotFound desc = could not find container \"9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3\": container with ID starting with 9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3 not found: ID does not exist" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.618709 4757 scope.go:117] "RemoveContainer" containerID="a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8" Oct 06 13:57:51 crc kubenswrapper[4757]: E1006 13:57:51.623205 4757 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8\": container with ID starting with a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8 not found: ID does not exist" containerID="a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.623249 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8"} err="failed to get container status \"a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8\": rpc error: code = NotFound desc = could not find container \"a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8\": container with ID starting with a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8 not found: ID does not exist" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.623282 4757 scope.go:117] "RemoveContainer" containerID="d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817" Oct 06 13:57:51 crc kubenswrapper[4757]: E1006 13:57:51.623614 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817\": container with ID starting with d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817 not found: ID does not exist" containerID="d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.623637 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817"} err="failed to get container status \"d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817\": rpc error: code = NotFound desc = could not find container \"d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817\": container with ID starting with d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817 not found: ID does not exist" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.623652 4757 scope.go:117] "RemoveContainer" containerID="0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83" Oct 06 13:57:51 crc kubenswrapper[4757]: E1006 13:57:51.623922 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83\": container with ID starting with 0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83 not found: ID does not exist" containerID="0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.623940 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83"} err="failed to get container status \"0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83\": rpc error: code = NotFound desc = could not find container \"0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83\": container with ID starting with 0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83 not found: ID does not exist" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.623954 4757 scope.go:117] 
"RemoveContainer" containerID="9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.625470 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3"} err="failed to get container status \"9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3\": rpc error: code = NotFound desc = could not find container \"9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3\": container with ID starting with 9faed425f06d2562824a823f5642d3e0f00b9c457a01fe9d85bff147745ff6f3 not found: ID does not exist" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.625495 4757 scope.go:117] "RemoveContainer" containerID="a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.625727 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8"} err="failed to get container status \"a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8\": rpc error: code = NotFound desc = could not find container \"a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8\": container with ID starting with a3d694055a2f0251b8053f7c5838166ce1d777a6ee357dcf3f852e28846503d8 not found: ID does not exist" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.625746 4757 scope.go:117] "RemoveContainer" containerID="d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.633025 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817"} err="failed to get container status \"d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817\": rpc error: code = NotFound desc = could not find container \"d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817\": container with ID starting with d60c297b338571921c37dcacb6e2c17b2b97cdcb71e2e185c30834fb79c54817 not found: ID does not exist" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.633079 4757 scope.go:117] "RemoveContainer" containerID="0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.633807 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83"} err="failed to get container status \"0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83\": rpc error: code = NotFound desc = could not find container \"0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83\": container with ID starting with 0d8220d11024223a25ae3c1fb113885cdc619ad1d18ff73e2c04765ba9af2c83 not found: ID does not exist" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.692493 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.825702 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-config\") pod \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.825766 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js9bh\" (UniqueName: \"kubernetes.io/projected/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-kube-api-access-js9bh\") pod \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.825785 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-nb\") pod \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.825830 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-swift-storage-0\") pod \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.825884 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-svc\") pod \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.826049 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-sb\") pod \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\" (UID: \"25ca3ce2-1e8e-4cab-b349-84fb82c52a56\") " Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.831113 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-kube-api-access-js9bh" (OuterVolumeSpecName: "kube-api-access-js9bh") pod "25ca3ce2-1e8e-4cab-b349-84fb82c52a56" (UID: "25ca3ce2-1e8e-4cab-b349-84fb82c52a56"). InnerVolumeSpecName "kube-api-access-js9bh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.907640 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-config" (OuterVolumeSpecName: "config") pod "25ca3ce2-1e8e-4cab-b349-84fb82c52a56" (UID: "25ca3ce2-1e8e-4cab-b349-84fb82c52a56"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.915217 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "25ca3ce2-1e8e-4cab-b349-84fb82c52a56" (UID: "25ca3ce2-1e8e-4cab-b349-84fb82c52a56"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.925011 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "25ca3ce2-1e8e-4cab-b349-84fb82c52a56" (UID: "25ca3ce2-1e8e-4cab-b349-84fb82c52a56"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.925456 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "25ca3ce2-1e8e-4cab-b349-84fb82c52a56" (UID: "25ca3ce2-1e8e-4cab-b349-84fb82c52a56"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.928732 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.928759 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.928772 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js9bh\" (UniqueName: \"kubernetes.io/projected/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-kube-api-access-js9bh\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.928791 4757 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.928801 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:51 crc kubenswrapper[4757]: I1006 13:57:51.946757 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "25ca3ce2-1e8e-4cab-b349-84fb82c52a56" (UID: "25ca3ce2-1e8e-4cab-b349-84fb82c52a56"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.030895 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/25ca3ce2-1e8e-4cab-b349-84fb82c52a56-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.111566 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-7ql5g"] Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.119514 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fc489599-7rtht" event={"ID":"25ca3ce2-1e8e-4cab-b349-84fb82c52a56","Type":"ContainerDied","Data":"602f41b412a152d3520d5d7a7a6e9e6e2c2ec9141c2bcae4cc67406984b0d9bd"} Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.119594 4757 scope.go:117] "RemoveContainer" containerID="14e4d0f68ad0a7a58e8f785958588888ef8b9418f7d9107506c3557ef11bf2d7" Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.119540 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fc489599-7rtht" Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.127437 4757 generic.go:334] "Generic (PLEG): container finished" podID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerID="a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b" exitCode=143 Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.127523 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162","Type":"ContainerDied","Data":"a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b"} Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.132126 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-6hgd2"] Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.133569 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb","Type":"ContainerStarted","Data":"60487956b45ab32166592c2b7eceedfd15fb90339151dc9e4ceaf70a848d1bb5"} Oct 06 13:57:52 crc kubenswrapper[4757]: W1006 13:57:52.141378 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe9e11b6_6610_4dc3_a5fc_94142df40ee3.slice/crio-633a6484b7eaaf16e6bf480064b95c0c9f91e5e2fb35bdc0a432c07e1dbcb71a WatchSource:0}: Error finding container 633a6484b7eaaf16e6bf480064b95c0c9f91e5e2fb35bdc0a432c07e1dbcb71a: Status 404 returned error can't find the container with id 633a6484b7eaaf16e6bf480064b95c0c9f91e5e2fb35bdc0a432c07e1dbcb71a Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.155721 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.455026301 podStartE2EDuration="16.155698408s" podCreationTimestamp="2025-10-06 13:57:36 +0000 UTC" firstStartedPulling="2025-10-06 13:57:36.806714225 +0000 UTC m=+1145.304032762" lastFinishedPulling="2025-10-06 13:57:51.507386332 +0000 UTC m=+1160.004704869" observedRunningTime="2025-10-06 13:57:52.14859788 +0000 UTC m=+1160.645916417" watchObservedRunningTime="2025-10-06 13:57:52.155698408 +0000 UTC m=+1160.653016945" Oct 06 13:57:52 crc kubenswrapper[4757]: W1006 13:57:52.197433 4757 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2b1e842_4aee_4ab7_97ad_6407e2d6834d.slice/crio-0ce605ca30a4381a66fe0e092c6cedcd84a3400008f8b08851773a3f1c52652e WatchSource:0}: Error finding container 0ce605ca30a4381a66fe0e092c6cedcd84a3400008f8b08851773a3f1c52652e: Status 404 returned error can't find the container with id 0ce605ca30a4381a66fe0e092c6cedcd84a3400008f8b08851773a3f1c52652e Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.259345 4757 scope.go:117] "RemoveContainer" containerID="17d7c1c3d9be3488cc60672b77690532199b118dcafd84380eeb6ca9c394edae" Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.272409 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8f2b9"] Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.272479 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-587f5f887c-jf8v5" Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.326939 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6fc489599-7rtht"] Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.336761 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6fc489599-7rtht"] Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.374243 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.421906 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-688cb45c44-gmn5j"] Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.422145 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-688cb45c44-gmn5j" podUID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerName="neutron-api" containerID="cri-o://25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053" gracePeriod=30 Oct 06 13:57:52 crc kubenswrapper[4757]: I1006 13:57:52.424193 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-688cb45c44-gmn5j" podUID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerName="neutron-httpd" containerID="cri-o://b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6" gracePeriod=30 Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.144457 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a274a347-259a-4919-8326-8047df9b0de8","Type":"ContainerStarted","Data":"66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be"} Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.145831 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.147145 4757 generic.go:334] "Generic (PLEG): container finished" podID="fe9e11b6-6610-4dc3-a5fc-94142df40ee3" containerID="9e94ec7898a8d5cddd1308d4a24463b9f75f24738f45b41036651b8543dd2f7c" exitCode=0 Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.147200 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7ql5g" event={"ID":"fe9e11b6-6610-4dc3-a5fc-94142df40ee3","Type":"ContainerDied","Data":"9e94ec7898a8d5cddd1308d4a24463b9f75f24738f45b41036651b8543dd2f7c"} Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.147219 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7ql5g" 
event={"ID":"fe9e11b6-6610-4dc3-a5fc-94142df40ee3","Type":"ContainerStarted","Data":"633a6484b7eaaf16e6bf480064b95c0c9f91e5e2fb35bdc0a432c07e1dbcb71a"} Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.148660 4757 generic.go:334] "Generic (PLEG): container finished" podID="d2b1e842-4aee-4ab7-97ad-6407e2d6834d" containerID="ece99cfce39d7c0be1b08a62794d4aa985ed9e4c7a9eebc564749f2540ee8d63" exitCode=0 Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.148729 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-6hgd2" event={"ID":"d2b1e842-4aee-4ab7-97ad-6407e2d6834d","Type":"ContainerDied","Data":"ece99cfce39d7c0be1b08a62794d4aa985ed9e4c7a9eebc564749f2540ee8d63"} Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.148940 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-6hgd2" event={"ID":"d2b1e842-4aee-4ab7-97ad-6407e2d6834d","Type":"ContainerStarted","Data":"0ce605ca30a4381a66fe0e092c6cedcd84a3400008f8b08851773a3f1c52652e"} Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.152888 4757 generic.go:334] "Generic (PLEG): container finished" podID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerID="b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6" exitCode=0 Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.152964 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688cb45c44-gmn5j" event={"ID":"2c70918c-e944-47ef-8c0c-bbf476d75a77","Type":"ContainerDied","Data":"b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6"} Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.154515 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerStarted","Data":"867d30eef6520c14dbf91a3f8d8fbe5f34232493bdb349ebc91ab068cd4093f2"} Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.156227 4757 generic.go:334] "Generic (PLEG): container finished" podID="fbc7bc9f-cc73-4943-94fd-c7288e7efb52" containerID="1f848e4aee396e52434f17956365c4c6aaf22ac1b6bad582ca2b6f0263241cd5" exitCode=0 Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.156948 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8f2b9" event={"ID":"fbc7bc9f-cc73-4943-94fd-c7288e7efb52","Type":"ContainerDied","Data":"1f848e4aee396e52434f17956365c4c6aaf22ac1b6bad582ca2b6f0263241cd5"} Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.156992 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8f2b9" event={"ID":"fbc7bc9f-cc73-4943-94fd-c7288e7efb52","Type":"ContainerStarted","Data":"027270fe64bc57ddaf44f4ac19e1c9540508bfad0244c34d20725f51d06f5e8e"} Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.173772 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=9.173747053 podStartE2EDuration="9.173747053s" podCreationTimestamp="2025-10-06 13:57:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:53.164653154 +0000 UTC m=+1161.661971701" watchObservedRunningTime="2025-10-06 13:57:53.173747053 +0000 UTC m=+1161.671065590" Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.917819 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.918599 
4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-log" containerID="cri-o://fe001ee38925cca5382d9fe049d7e9db56dc87cbbb2fcf7ea31b331f5f614f78" gracePeriod=30 Oct 06 13:57:53 crc kubenswrapper[4757]: I1006 13:57:53.918735 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-httpd" containerID="cri-o://3ef57524bb68d3a21d10583387c6e1165591d0cb469902192d8fe145b8a0a9db" gracePeriod=30 Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.173000 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerStarted","Data":"3227955555c26be0206242a13ec3b1a049b08b322e0b2b0c3fd1e8cc76e0a76a"} Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.175357 4757 generic.go:334] "Generic (PLEG): container finished" podID="3319038f-4b3b-47c4-880b-6dc356283248" containerID="8da7ce44e013c3489aeab6813581419f2abbece874c1d0f222bcbf9a26111934" exitCode=0 Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.175415 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3319038f-4b3b-47c4-880b-6dc356283248","Type":"ContainerDied","Data":"8da7ce44e013c3489aeab6813581419f2abbece874c1d0f222bcbf9a26111934"} Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.178502 4757 generic.go:334] "Generic (PLEG): container finished" podID="cc62b42c-2672-4413-9768-4949a52c7659" containerID="fe001ee38925cca5382d9fe049d7e9db56dc87cbbb2fcf7ea31b331f5f614f78" exitCode=143 Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.178784 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cc62b42c-2672-4413-9768-4949a52c7659","Type":"ContainerDied","Data":"fe001ee38925cca5382d9fe049d7e9db56dc87cbbb2fcf7ea31b331f5f614f78"} Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.201911 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" path="/var/lib/kubelet/pods/25ca3ce2-1e8e-4cab-b349-84fb82c52a56/volumes" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.311255 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.375659 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data-custom\") pod \"3319038f-4b3b-47c4-880b-6dc356283248\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.375704 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data\") pod \"3319038f-4b3b-47c4-880b-6dc356283248\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.375767 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-combined-ca-bundle\") pod \"3319038f-4b3b-47c4-880b-6dc356283248\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.375848 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3319038f-4b3b-47c4-880b-6dc356283248-etc-machine-id\") pod \"3319038f-4b3b-47c4-880b-6dc356283248\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.375871 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdh8w\" (UniqueName: \"kubernetes.io/projected/3319038f-4b3b-47c4-880b-6dc356283248-kube-api-access-vdh8w\") pod \"3319038f-4b3b-47c4-880b-6dc356283248\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.375898 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-scripts\") pod \"3319038f-4b3b-47c4-880b-6dc356283248\" (UID: \"3319038f-4b3b-47c4-880b-6dc356283248\") " Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.384131 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3319038f-4b3b-47c4-880b-6dc356283248-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3319038f-4b3b-47c4-880b-6dc356283248" (UID: "3319038f-4b3b-47c4-880b-6dc356283248"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.393515 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3319038f-4b3b-47c4-880b-6dc356283248" (UID: "3319038f-4b3b-47c4-880b-6dc356283248"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.395782 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-scripts" (OuterVolumeSpecName: "scripts") pod "3319038f-4b3b-47c4-880b-6dc356283248" (UID: "3319038f-4b3b-47c4-880b-6dc356283248"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.403231 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.147:9292/healthcheck\": read tcp 10.217.0.2:33972->10.217.0.147:9292: read: connection reset by peer" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.403347 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.147:9292/healthcheck\": read tcp 10.217.0.2:33980->10.217.0.147:9292: read: connection reset by peer" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.407219 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3319038f-4b3b-47c4-880b-6dc356283248-kube-api-access-vdh8w" (OuterVolumeSpecName: "kube-api-access-vdh8w") pod "3319038f-4b3b-47c4-880b-6dc356283248" (UID: "3319038f-4b3b-47c4-880b-6dc356283248"). InnerVolumeSpecName "kube-api-access-vdh8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.479783 4757 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3319038f-4b3b-47c4-880b-6dc356283248-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.479841 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdh8w\" (UniqueName: \"kubernetes.io/projected/3319038f-4b3b-47c4-880b-6dc356283248-kube-api-access-vdh8w\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.479868 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.479879 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.489517 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3319038f-4b3b-47c4-880b-6dc356283248" (UID: "3319038f-4b3b-47c4-880b-6dc356283248"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.582155 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.582282 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data" (OuterVolumeSpecName: "config-data") pod "3319038f-4b3b-47c4-880b-6dc356283248" (UID: "3319038f-4b3b-47c4-880b-6dc356283248"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.674523 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7ql5g" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.685111 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3319038f-4b3b-47c4-880b-6dc356283248-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.762283 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-6hgd2" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.764967 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8f2b9" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.785898 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/fe9e11b6-6610-4dc3-a5fc-94142df40ee3-kube-api-access-lkhf8\") pod \"fe9e11b6-6610-4dc3-a5fc-94142df40ee3\" (UID: \"fe9e11b6-6610-4dc3-a5fc-94142df40ee3\") " Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.786065 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j59w7\" (UniqueName: \"kubernetes.io/projected/d2b1e842-4aee-4ab7-97ad-6407e2d6834d-kube-api-access-j59w7\") pod \"d2b1e842-4aee-4ab7-97ad-6407e2d6834d\" (UID: \"d2b1e842-4aee-4ab7-97ad-6407e2d6834d\") " Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.791169 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2b1e842-4aee-4ab7-97ad-6407e2d6834d-kube-api-access-j59w7" (OuterVolumeSpecName: "kube-api-access-j59w7") pod "d2b1e842-4aee-4ab7-97ad-6407e2d6834d" (UID: "d2b1e842-4aee-4ab7-97ad-6407e2d6834d"). InnerVolumeSpecName "kube-api-access-j59w7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.793834 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe9e11b6-6610-4dc3-a5fc-94142df40ee3-kube-api-access-lkhf8" (OuterVolumeSpecName: "kube-api-access-lkhf8") pod "fe9e11b6-6610-4dc3-a5fc-94142df40ee3" (UID: "fe9e11b6-6610-4dc3-a5fc-94142df40ee3"). InnerVolumeSpecName "kube-api-access-lkhf8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.887241 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nw6h\" (UniqueName: \"kubernetes.io/projected/fbc7bc9f-cc73-4943-94fd-c7288e7efb52-kube-api-access-4nw6h\") pod \"fbc7bc9f-cc73-4943-94fd-c7288e7efb52\" (UID: \"fbc7bc9f-cc73-4943-94fd-c7288e7efb52\") " Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.887890 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkhf8\" (UniqueName: \"kubernetes.io/projected/fe9e11b6-6610-4dc3-a5fc-94142df40ee3-kube-api-access-lkhf8\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.887906 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j59w7\" (UniqueName: \"kubernetes.io/projected/d2b1e842-4aee-4ab7-97ad-6407e2d6834d-kube-api-access-j59w7\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.890715 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbc7bc9f-cc73-4943-94fd-c7288e7efb52-kube-api-access-4nw6h" (OuterVolumeSpecName: "kube-api-access-4nw6h") pod "fbc7bc9f-cc73-4943-94fd-c7288e7efb52" (UID: "fbc7bc9f-cc73-4943-94fd-c7288e7efb52"). InnerVolumeSpecName "kube-api-access-4nw6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:54 crc kubenswrapper[4757]: I1006 13:57:54.990316 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nw6h\" (UniqueName: \"kubernetes.io/projected/fbc7bc9f-cc73-4943-94fd-c7288e7efb52-kube-api-access-4nw6h\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.060077 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.092769 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-public-tls-certs\") pod \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.092826 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-logs\") pod \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.092869 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdxzf\" (UniqueName: \"kubernetes.io/projected/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-kube-api-access-vdxzf\") pod \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.092901 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-config-data\") pod \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.093026 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-combined-ca-bundle\") pod \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.093050 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.093120 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-scripts\") pod \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.093148 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-httpd-run\") pod \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\" (UID: \"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162\") " Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.094280 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" (UID: "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.106896 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-logs" (OuterVolumeSpecName: "logs") pod "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" (UID: "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.142295 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-kube-api-access-vdxzf" (OuterVolumeSpecName: "kube-api-access-vdxzf") pod "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" (UID: "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162"). InnerVolumeSpecName "kube-api-access-vdxzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.145634 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-scripts" (OuterVolumeSpecName: "scripts") pod "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" (UID: "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.164317 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" (UID: "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.195897 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.196294 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdxzf\" (UniqueName: \"kubernetes.io/projected/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-kube-api-access-vdxzf\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.196347 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.196404 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.196463 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.217565 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-config-data" (OuterVolumeSpecName: "config-data") pod "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" (UID: "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.223146 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" (UID: "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.226729 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8f2b9" event={"ID":"fbc7bc9f-cc73-4943-94fd-c7288e7efb52","Type":"ContainerDied","Data":"027270fe64bc57ddaf44f4ac19e1c9540508bfad0244c34d20725f51d06f5e8e"} Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.226873 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="027270fe64bc57ddaf44f4ac19e1c9540508bfad0244c34d20725f51d06f5e8e" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.227028 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8f2b9" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.229542 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.241520 4757 generic.go:334] "Generic (PLEG): container finished" podID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerID="570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289" exitCode=0 Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.241605 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.241629 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162","Type":"ContainerDied","Data":"570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289"} Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.245176 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162","Type":"ContainerDied","Data":"ddaadff2159738ae07a5e0b9679af6ad9973d6cbf7bb0e373f3b04df6c7535f0"} Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.245257 4757 scope.go:117] "RemoveContainer" containerID="570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.261651 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3319038f-4b3b-47c4-880b-6dc356283248","Type":"ContainerDied","Data":"c0560fe1b4a8dd964b5909bcd448b2158908daddd2d0a9a481be7e62032e6360"} Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.261757 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.291554 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-7ql5g" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.291568 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7ql5g" event={"ID":"fe9e11b6-6610-4dc3-a5fc-94142df40ee3","Type":"ContainerDied","Data":"633a6484b7eaaf16e6bf480064b95c0c9f91e5e2fb35bdc0a432c07e1dbcb71a"} Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.291610 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="633a6484b7eaaf16e6bf480064b95c0c9f91e5e2fb35bdc0a432c07e1dbcb71a" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.297022 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" (UID: "e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.298338 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.298363 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.298374 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.298385 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.308224 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-6hgd2" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.308484 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-6hgd2" event={"ID":"d2b1e842-4aee-4ab7-97ad-6407e2d6834d","Type":"ContainerDied","Data":"0ce605ca30a4381a66fe0e092c6cedcd84a3400008f8b08851773a3f1c52652e"} Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.308515 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ce605ca30a4381a66fe0e092c6cedcd84a3400008f8b08851773a3f1c52652e" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.326126 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerStarted","Data":"928c6b86589c64328bce8ae329baea34e5559cfc23e8ec86853784a5d6880d10"} Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.337217 4757 scope.go:117] "RemoveContainer" containerID="a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.378626 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.379956 4757 scope.go:117] "RemoveContainer" containerID="570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289" Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.383343 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289\": container with ID starting with 570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289 not found: ID does not exist" containerID="570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.383401 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289"} err="failed to get container status \"570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289\": rpc error: code = NotFound desc = could not find container \"570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289\": container with ID starting with 570f055d1b9d883fd2181d75b0306f41240cf366b7fe048bedccb9ac1e317289 not found: ID does not exist" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.383428 4757 scope.go:117] "RemoveContainer" containerID="a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.385474 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.385876 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b\": container with ID starting with a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b not found: ID does not exist" containerID="a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.385914 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b"} err="failed to get container status 
\"a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b\": rpc error: code = NotFound desc = could not find container \"a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b\": container with ID starting with a3e0134d66bb89269e9b6e10be5999c12c3e61a50dd6b03ef5cc78f7a1d9590b not found: ID does not exist" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.385943 4757 scope.go:117] "RemoveContainer" containerID="2fed91e04d37e588ad11a9573cbf6d5f7e04c37e956234e1c31609ed168f2e2e" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.413871 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.414528 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbc7bc9f-cc73-4943-94fd-c7288e7efb52" containerName="mariadb-database-create" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414543 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbc7bc9f-cc73-4943-94fd-c7288e7efb52" containerName="mariadb-database-create" Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.414557 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" containerName="init" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414564 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" containerName="init" Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.414577 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2b1e842-4aee-4ab7-97ad-6407e2d6834d" containerName="mariadb-database-create" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414584 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2b1e842-4aee-4ab7-97ad-6407e2d6834d" containerName="mariadb-database-create" Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.414598 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-log" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414604 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-log" Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.414620 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe9e11b6-6610-4dc3-a5fc-94142df40ee3" containerName="mariadb-database-create" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414626 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe9e11b6-6610-4dc3-a5fc-94142df40ee3" containerName="mariadb-database-create" Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.414637 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" containerName="dnsmasq-dns" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414642 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" containerName="dnsmasq-dns" Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.414659 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3319038f-4b3b-47c4-880b-6dc356283248" containerName="cinder-scheduler" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414665 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3319038f-4b3b-47c4-880b-6dc356283248" containerName="cinder-scheduler" Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.414674 4757 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-httpd" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414679 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-httpd" Oct 06 13:57:55 crc kubenswrapper[4757]: E1006 13:57:55.414691 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3319038f-4b3b-47c4-880b-6dc356283248" containerName="probe" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414696 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3319038f-4b3b-47c4-880b-6dc356283248" containerName="probe" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414849 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe9e11b6-6610-4dc3-a5fc-94142df40ee3" containerName="mariadb-database-create" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414861 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2b1e842-4aee-4ab7-97ad-6407e2d6834d" containerName="mariadb-database-create" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414867 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3319038f-4b3b-47c4-880b-6dc356283248" containerName="probe" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414876 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3319038f-4b3b-47c4-880b-6dc356283248" containerName="cinder-scheduler" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414884 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbc7bc9f-cc73-4943-94fd-c7288e7efb52" containerName="mariadb-database-create" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414894 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-log" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414903 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" containerName="glance-httpd" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.414936 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="25ca3ce2-1e8e-4cab-b349-84fb82c52a56" containerName="dnsmasq-dns" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.415845 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.418594 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.420843 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.468011 4757 scope.go:117] "RemoveContainer" containerID="8da7ce44e013c3489aeab6813581419f2abbece874c1d0f222bcbf9a26111934" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.507712 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt4sk\" (UniqueName: \"kubernetes.io/projected/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-kube-api-access-gt4sk\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.507799 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.507831 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-scripts\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.508139 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.508526 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.508951 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.582369 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.591176 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.601817 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.606023 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.610495 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.610586 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.610625 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.610655 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt4sk\" (UniqueName: \"kubernetes.io/projected/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-kube-api-access-gt4sk\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.610712 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.610738 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-scripts\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.611032 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.614555 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.614728 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.616948 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.619194 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.619593 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-scripts\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.624578 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.626862 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.665869 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt4sk\" (UniqueName: \"kubernetes.io/projected/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-kube-api-access-gt4sk\") pod \"cinder-scheduler-0\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") " pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.712707 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.712769 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.712791 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4vb9\" (UniqueName: \"kubernetes.io/projected/4da5a9db-df84-4b71-b566-7c723fd7eb65-kube-api-access-z4vb9\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.712958 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-config-data\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.713141 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-logs\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.713227 4757 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.713393 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.713432 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-scripts\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.777002 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815048 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815114 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-scripts\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815158 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815193 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815211 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4vb9\" (UniqueName: \"kubernetes.io/projected/4da5a9db-df84-4b71-b566-7c723fd7eb65-kube-api-access-z4vb9\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815249 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-config-data\") pod \"glance-default-external-api-0\" (UID: 
\"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815291 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-logs\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815317 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815529 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.815686 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.823343 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-scripts\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.823523 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-logs\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.824511 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-config-data\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.824889 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.830045 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.838377 4757 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4vb9\" (UniqueName: \"kubernetes.io/projected/4da5a9db-df84-4b71-b566-7c723fd7eb65-kube-api-access-z4vb9\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.886685 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " pod="openstack/glance-default-external-api-0" Oct 06 13:57:55 crc kubenswrapper[4757]: I1006 13:57:55.932576 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 06 13:57:56 crc kubenswrapper[4757]: I1006 13:57:56.197533 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3319038f-4b3b-47c4-880b-6dc356283248" path="/var/lib/kubelet/pods/3319038f-4b3b-47c4-880b-6dc356283248/volumes" Oct 06 13:57:56 crc kubenswrapper[4757]: I1006 13:57:56.199953 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162" path="/var/lib/kubelet/pods/e9d2c09c-f5f0-4e7a-b3e0-1c15108a8162/volumes" Oct 06 13:57:56 crc kubenswrapper[4757]: W1006 13:57:56.297337 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a2f1fd8_9191_40cd_9edb_6a681214aaa9.slice/crio-dd2060be9b6e0c61009bca07a311cf253468db4ae06c7b23b09a8177a5d40b53 WatchSource:0}: Error finding container dd2060be9b6e0c61009bca07a311cf253468db4ae06c7b23b09a8177a5d40b53: Status 404 returned error can't find the container with id dd2060be9b6e0c61009bca07a311cf253468db4ae06c7b23b09a8177a5d40b53 Oct 06 13:57:56 crc kubenswrapper[4757]: I1006 13:57:56.301141 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 13:57:56 crc kubenswrapper[4757]: I1006 13:57:56.343327 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5a2f1fd8-9191-40cd-9edb-6a681214aaa9","Type":"ContainerStarted","Data":"dd2060be9b6e0c61009bca07a311cf253468db4ae06c7b23b09a8177a5d40b53"} Oct 06 13:57:56 crc kubenswrapper[4757]: I1006 13:57:56.362001 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerStarted","Data":"07e510759343c92d732cc4a9dccfa83c6d7e1dd25484f517c514e687ae396c74"} Oct 06 13:57:56 crc kubenswrapper[4757]: I1006 13:57:56.512435 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.099935 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": read tcp 10.217.0.2:43420->10.217.0.150:9292: read: connection reset by peer" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.100213 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": read 
tcp 10.217.0.2:43418->10.217.0.150:9292: read: connection reset by peer" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.393033 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5a2f1fd8-9191-40cd-9edb-6a681214aaa9","Type":"ContainerStarted","Data":"28e913a804b505e4f014857f7b6a17fb5e76553cd58c85ad5dd061ab65fa664b"} Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.396504 4757 generic.go:334] "Generic (PLEG): container finished" podID="cc62b42c-2672-4413-9768-4949a52c7659" containerID="3ef57524bb68d3a21d10583387c6e1165591d0cb469902192d8fe145b8a0a9db" exitCode=0 Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.396561 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cc62b42c-2672-4413-9768-4949a52c7659","Type":"ContainerDied","Data":"3ef57524bb68d3a21d10583387c6e1165591d0cb469902192d8fe145b8a0a9db"} Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.415246 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4da5a9db-df84-4b71-b566-7c723fd7eb65","Type":"ContainerStarted","Data":"0cb8bd1a995fc5d7d87c203ebb692a3ce29bbd8e20024032b2050c0645baebc6"} Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.415292 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4da5a9db-df84-4b71-b566-7c723fd7eb65","Type":"ContainerStarted","Data":"261f7e3c6d4c3ed7d9173122ab4fee7f0a1c905bbd1f110cf3a80f6ff3e511fb"} Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.611514 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.723139 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-config-data\") pod \"cc62b42c-2672-4413-9768-4949a52c7659\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.723206 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-internal-tls-certs\") pod \"cc62b42c-2672-4413-9768-4949a52c7659\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.723303 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-combined-ca-bundle\") pod \"cc62b42c-2672-4413-9768-4949a52c7659\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.723333 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-scripts\") pod \"cc62b42c-2672-4413-9768-4949a52c7659\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.723390 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"cc62b42c-2672-4413-9768-4949a52c7659\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.723427 4757 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-httpd-run\") pod \"cc62b42c-2672-4413-9768-4949a52c7659\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.723464 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-logs\") pod \"cc62b42c-2672-4413-9768-4949a52c7659\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.723488 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q8l4\" (UniqueName: \"kubernetes.io/projected/cc62b42c-2672-4413-9768-4949a52c7659-kube-api-access-7q8l4\") pod \"cc62b42c-2672-4413-9768-4949a52c7659\" (UID: \"cc62b42c-2672-4413-9768-4949a52c7659\") " Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.729988 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cc62b42c-2672-4413-9768-4949a52c7659" (UID: "cc62b42c-2672-4413-9768-4949a52c7659"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.730052 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc62b42c-2672-4413-9768-4949a52c7659-kube-api-access-7q8l4" (OuterVolumeSpecName: "kube-api-access-7q8l4") pod "cc62b42c-2672-4413-9768-4949a52c7659" (UID: "cc62b42c-2672-4413-9768-4949a52c7659"). InnerVolumeSpecName "kube-api-access-7q8l4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.730358 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-scripts" (OuterVolumeSpecName: "scripts") pod "cc62b42c-2672-4413-9768-4949a52c7659" (UID: "cc62b42c-2672-4413-9768-4949a52c7659"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.730856 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-logs" (OuterVolumeSpecName: "logs") pod "cc62b42c-2672-4413-9768-4949a52c7659" (UID: "cc62b42c-2672-4413-9768-4949a52c7659"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.755359 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "cc62b42c-2672-4413-9768-4949a52c7659" (UID: "cc62b42c-2672-4413-9768-4949a52c7659"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.787436 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc62b42c-2672-4413-9768-4949a52c7659" (UID: "cc62b42c-2672-4413-9768-4949a52c7659"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.826540 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.827214 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.827339 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.827466 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.827549 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc62b42c-2672-4413-9768-4949a52c7659-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.827627 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q8l4\" (UniqueName: \"kubernetes.io/projected/cc62b42c-2672-4413-9768-4949a52c7659-kube-api-access-7q8l4\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.853236 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cc62b42c-2672-4413-9768-4949a52c7659" (UID: "cc62b42c-2672-4413-9768-4949a52c7659"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.868697 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.920151 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-config-data" (OuterVolumeSpecName: "config-data") pod "cc62b42c-2672-4413-9768-4949a52c7659" (UID: "cc62b42c-2672-4413-9768-4949a52c7659"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.920637 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-688cb45c44-gmn5j" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.929433 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.929464 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc62b42c-2672-4413-9768-4949a52c7659-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:57 crc kubenswrapper[4757]: I1006 13:57:57.929474 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.030441 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-httpd-config\") pod \"2c70918c-e944-47ef-8c0c-bbf476d75a77\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.030620 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cv458\" (UniqueName: \"kubernetes.io/projected/2c70918c-e944-47ef-8c0c-bbf476d75a77-kube-api-access-cv458\") pod \"2c70918c-e944-47ef-8c0c-bbf476d75a77\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.030655 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-config\") pod \"2c70918c-e944-47ef-8c0c-bbf476d75a77\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.030683 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-combined-ca-bundle\") pod \"2c70918c-e944-47ef-8c0c-bbf476d75a77\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.030728 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-ovndb-tls-certs\") pod \"2c70918c-e944-47ef-8c0c-bbf476d75a77\" (UID: \"2c70918c-e944-47ef-8c0c-bbf476d75a77\") " Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.036843 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c70918c-e944-47ef-8c0c-bbf476d75a77-kube-api-access-cv458" (OuterVolumeSpecName: "kube-api-access-cv458") pod "2c70918c-e944-47ef-8c0c-bbf476d75a77" (UID: "2c70918c-e944-47ef-8c0c-bbf476d75a77"). InnerVolumeSpecName "kube-api-access-cv458". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.038247 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "2c70918c-e944-47ef-8c0c-bbf476d75a77" (UID: "2c70918c-e944-47ef-8c0c-bbf476d75a77"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.089404 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c70918c-e944-47ef-8c0c-bbf476d75a77" (UID: "2c70918c-e944-47ef-8c0c-bbf476d75a77"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.114117 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-config" (OuterVolumeSpecName: "config") pod "2c70918c-e944-47ef-8c0c-bbf476d75a77" (UID: "2c70918c-e944-47ef-8c0c-bbf476d75a77"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.127631 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "2c70918c-e944-47ef-8c0c-bbf476d75a77" (UID: "2c70918c-e944-47ef-8c0c-bbf476d75a77"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.132607 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cv458\" (UniqueName: \"kubernetes.io/projected/2c70918c-e944-47ef-8c0c-bbf476d75a77-kube-api-access-cv458\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.132643 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.132654 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.132662 4757 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.132671 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2c70918c-e944-47ef-8c0c-bbf476d75a77-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.426572 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5a2f1fd8-9191-40cd-9edb-6a681214aaa9","Type":"ContainerStarted","Data":"0324863c9c638456508e30d35fe0f9846118f58caedb60fbed5e6e14f91c6b11"} Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.430338 4757 generic.go:334] "Generic (PLEG): container finished" podID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerID="25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053" exitCode=0 Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.430400 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688cb45c44-gmn5j" event={"ID":"2c70918c-e944-47ef-8c0c-bbf476d75a77","Type":"ContainerDied","Data":"25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053"} 
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.430485 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-688cb45c44-gmn5j" event={"ID":"2c70918c-e944-47ef-8c0c-bbf476d75a77","Type":"ContainerDied","Data":"05015caba8141658e81fb8c70494b1d49d21c8e3f807be41bf3c14a2a360d83d"}
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.430502 4757 scope.go:117] "RemoveContainer" containerID="b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.430714 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-688cb45c44-gmn5j"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.432999 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cc62b42c-2672-4413-9768-4949a52c7659","Type":"ContainerDied","Data":"bea410e00f7dc5385089ba4ccbe81bf22c7880a228a73451cb880f197dfd5e95"}
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.433166 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.436715 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4da5a9db-df84-4b71-b566-7c723fd7eb65","Type":"ContainerStarted","Data":"fe4d0be031635e1711fdd14e7751dbf7c8f1ff14ed259fe9f2d2457646f22b8f"}
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.440035 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerStarted","Data":"fbfb33efcd475c91096509bcaa55ef035d54e5862559c0a95a2cdd8a1709b427"}
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.440164 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="ceilometer-central-agent" containerID="cri-o://3227955555c26be0206242a13ec3b1a049b08b322e0b2b0c3fd1e8cc76e0a76a" gracePeriod=30
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.440229 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.440236 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="proxy-httpd" containerID="cri-o://fbfb33efcd475c91096509bcaa55ef035d54e5862559c0a95a2cdd8a1709b427" gracePeriod=30
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.440272 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="sg-core" containerID="cri-o://07e510759343c92d732cc4a9dccfa83c6d7e1dd25484f517c514e687ae396c74" gracePeriod=30
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.440303 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="ceilometer-notification-agent" containerID="cri-o://928c6b86589c64328bce8ae329baea34e5559cfc23e8ec86853784a5d6880d10" gracePeriod=30
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.459709 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.459687667 podStartE2EDuration="3.459687667s" podCreationTimestamp="2025-10-06 13:57:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:58.4513664 +0000 UTC m=+1166.948684937" watchObservedRunningTime="2025-10-06 13:57:58.459687667 +0000 UTC m=+1166.957006204"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.465475 4757 scope.go:117] "RemoveContainer" containerID="25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.499507 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=8.679316191 podStartE2EDuration="13.49948788s" podCreationTimestamp="2025-10-06 13:57:45 +0000 UTC" firstStartedPulling="2025-10-06 13:57:52.362150304 +0000 UTC m=+1160.859468841" lastFinishedPulling="2025-10-06 13:57:57.182322003 +0000 UTC m=+1165.679640530" observedRunningTime="2025-10-06 13:57:58.481464493 +0000 UTC m=+1166.978783030" watchObservedRunningTime="2025-10-06 13:57:58.49948788 +0000 UTC m=+1166.996806417"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.506082 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.506790 4757 scope.go:117] "RemoveContainer" containerID="b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6"
Oct 06 13:57:58 crc kubenswrapper[4757]: E1006 13:57:58.507354 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6\": container with ID starting with b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6 not found: ID does not exist" containerID="b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.507491 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6"} err="failed to get container status \"b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6\": rpc error: code = NotFound desc = could not find container \"b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6\": container with ID starting with b473a669d877f9b737770a744f0fe354417ac0f8915f067a05b66404ac5974c6 not found: ID does not exist"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.507599 4757 scope.go:117] "RemoveContainer" containerID="25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053"
Oct 06 13:57:58 crc kubenswrapper[4757]: E1006 13:57:58.511497 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053\": container with ID starting with 25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053 not found: ID does not exist" containerID="25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.511638 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053"} err="failed to get container status \"25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053\": rpc error: code = NotFound desc = could not find container \"25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053\": container with ID starting with 25dcda8ac297ba14d0e224a3fc094317b883ab5928767bb009570cf619d04053 not found: ID does not exist"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.511780 4757 scope.go:117] "RemoveContainer" containerID="3ef57524bb68d3a21d10583387c6e1165591d0cb469902192d8fe145b8a0a9db"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.521990 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.530875 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-688cb45c44-gmn5j"]
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.546071 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 06 13:57:58 crc kubenswrapper[4757]: E1006 13:57:58.546480 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-httpd"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.546491 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-httpd"
Oct 06 13:57:58 crc kubenswrapper[4757]: E1006 13:57:58.546506 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerName="neutron-httpd"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.546512 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerName="neutron-httpd"
Oct 06 13:57:58 crc kubenswrapper[4757]: E1006 13:57:58.546531 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-log"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.546537 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-log"
Oct 06 13:57:58 crc kubenswrapper[4757]: E1006 13:57:58.546548 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerName="neutron-api"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.546554 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerName="neutron-api"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.546724 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerName="neutron-httpd"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.546734 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-httpd"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.546743 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc62b42c-2672-4413-9768-4949a52c7659" containerName="glance-log"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.546756 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c70918c-e944-47ef-8c0c-bbf476d75a77" containerName="neutron-api"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.554820 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.557666 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.557868 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.560053 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-688cb45c44-gmn5j"]
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.561017 4757 scope.go:117] "RemoveContainer" containerID="fe001ee38925cca5382d9fe049d7e9db56dc87cbbb2fcf7ea31b331f5f614f78"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.605143 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.611966 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.611949767 podStartE2EDuration="3.611949767s" podCreationTimestamp="2025-10-06 13:57:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:57:58.561776942 +0000 UTC m=+1167.059095489" watchObservedRunningTime="2025-10-06 13:57:58.611949767 +0000 UTC m=+1167.109268304"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.645469 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.645540 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.645572 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-logs\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.645638 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzswt\" (UniqueName: \"kubernetes.io/projected/78a4414b-7eec-457f-b08c-aeb719ffc320-kube-api-access-xzswt\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.645671 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.645692 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-scripts\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.645706 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.645748 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-config-data\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747112 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747195 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-logs\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747269 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzswt\" (UniqueName: \"kubernetes.io/projected/78a4414b-7eec-457f-b08c-aeb719ffc320-kube-api-access-xzswt\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747307 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747325 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-scripts\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747342 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747380 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-config-data\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747400 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747566 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.747910 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.748527 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-logs\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.758933 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.759009 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.759933 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-scripts\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.762267 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-config-data\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.773045 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzswt\" (UniqueName: \"kubernetes.io/projected/78a4414b-7eec-457f-b08c-aeb719ffc320-kube-api-access-xzswt\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.799916 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-internal-api-0\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:58 crc kubenswrapper[4757]: I1006 13:57:58.947085 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Oct 06 13:57:59 crc kubenswrapper[4757]: I1006 13:57:59.451879 4757 generic.go:334] "Generic (PLEG): container finished" podID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerID="fbfb33efcd475c91096509bcaa55ef035d54e5862559c0a95a2cdd8a1709b427" exitCode=0
Oct 06 13:57:59 crc kubenswrapper[4757]: I1006 13:57:59.452539 4757 generic.go:334] "Generic (PLEG): container finished" podID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerID="07e510759343c92d732cc4a9dccfa83c6d7e1dd25484f517c514e687ae396c74" exitCode=2
Oct 06 13:57:59 crc kubenswrapper[4757]: I1006 13:57:59.452556 4757 generic.go:334] "Generic (PLEG): container finished" podID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerID="928c6b86589c64328bce8ae329baea34e5559cfc23e8ec86853784a5d6880d10" exitCode=0
Oct 06 13:57:59 crc kubenswrapper[4757]: I1006 13:57:59.451947 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerDied","Data":"fbfb33efcd475c91096509bcaa55ef035d54e5862559c0a95a2cdd8a1709b427"}
Oct 06 13:57:59 crc kubenswrapper[4757]: I1006 13:57:59.452629 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerDied","Data":"07e510759343c92d732cc4a9dccfa83c6d7e1dd25484f517c514e687ae396c74"}
Oct 06 13:57:59 crc kubenswrapper[4757]: I1006 13:57:59.452645 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerDied","Data":"928c6b86589c64328bce8ae329baea34e5559cfc23e8ec86853784a5d6880d10"}
Oct 06 13:57:59 crc kubenswrapper[4757]: I1006 13:57:59.544281 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 06 13:57:59 crc kubenswrapper[4757]: W1006 13:57:59.552344 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78a4414b_7eec_457f_b08c_aeb719ffc320.slice/crio-41030caccef7e2b1af92bd1ad914e58506845cf400fccf7b0184bf3f70f62852 WatchSource:0}: Error finding container 41030caccef7e2b1af92bd1ad914e58506845cf400fccf7b0184bf3f70f62852: Status 404 returned error can't find the container with id 41030caccef7e2b1af92bd1ad914e58506845cf400fccf7b0184bf3f70f62852
Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.200682 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c70918c-e944-47ef-8c0c-bbf476d75a77" path="/var/lib/kubelet/pods/2c70918c-e944-47ef-8c0c-bbf476d75a77/volumes"
Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.202154 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc62b42c-2672-4413-9768-4949a52c7659"
path="/var/lib/kubelet/pods/cc62b42c-2672-4413-9768-4949a52c7659/volumes" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.485471 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78a4414b-7eec-457f-b08c-aeb719ffc320","Type":"ContainerStarted","Data":"17ce35871062aaa7a1aa4139c7f03addfd9e5a2514de88d4e098daa173fc41a8"} Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.485763 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78a4414b-7eec-457f-b08c-aeb719ffc320","Type":"ContainerStarted","Data":"41030caccef7e2b1af92bd1ad914e58506845cf400fccf7b0184bf3f70f62852"} Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.490868 4757 generic.go:334] "Generic (PLEG): container finished" podID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerID="3227955555c26be0206242a13ec3b1a049b08b322e0b2b0c3fd1e8cc76e0a76a" exitCode=0 Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.490896 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerDied","Data":"3227955555c26be0206242a13ec3b1a049b08b322e0b2b0c3fd1e8cc76e0a76a"} Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.777961 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.798165 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.902822 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-run-httpd\") pod \"35e902eb-73d7-4f08-b184-3b4804d7b483\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.902909 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-config-data\") pod \"35e902eb-73d7-4f08-b184-3b4804d7b483\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.902940 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-combined-ca-bundle\") pod \"35e902eb-73d7-4f08-b184-3b4804d7b483\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.902981 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-log-httpd\") pod \"35e902eb-73d7-4f08-b184-3b4804d7b483\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.903033 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-scripts\") pod \"35e902eb-73d7-4f08-b184-3b4804d7b483\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.903122 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mstz4\" (UniqueName: 
\"kubernetes.io/projected/35e902eb-73d7-4f08-b184-3b4804d7b483-kube-api-access-mstz4\") pod \"35e902eb-73d7-4f08-b184-3b4804d7b483\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.903147 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-sg-core-conf-yaml\") pod \"35e902eb-73d7-4f08-b184-3b4804d7b483\" (UID: \"35e902eb-73d7-4f08-b184-3b4804d7b483\") " Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.904875 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "35e902eb-73d7-4f08-b184-3b4804d7b483" (UID: "35e902eb-73d7-4f08-b184-3b4804d7b483"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.909329 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35e902eb-73d7-4f08-b184-3b4804d7b483-kube-api-access-mstz4" (OuterVolumeSpecName: "kube-api-access-mstz4") pod "35e902eb-73d7-4f08-b184-3b4804d7b483" (UID: "35e902eb-73d7-4f08-b184-3b4804d7b483"). InnerVolumeSpecName "kube-api-access-mstz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.909572 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "35e902eb-73d7-4f08-b184-3b4804d7b483" (UID: "35e902eb-73d7-4f08-b184-3b4804d7b483"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.912424 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-scripts" (OuterVolumeSpecName: "scripts") pod "35e902eb-73d7-4f08-b184-3b4804d7b483" (UID: "35e902eb-73d7-4f08-b184-3b4804d7b483"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.934758 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-d1d1-account-create-pfx4p"] Oct 06 13:58:00 crc kubenswrapper[4757]: E1006 13:58:00.935165 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="ceilometer-notification-agent" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.935181 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="ceilometer-notification-agent" Oct 06 13:58:00 crc kubenswrapper[4757]: E1006 13:58:00.935203 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="proxy-httpd" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.935210 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="proxy-httpd" Oct 06 13:58:00 crc kubenswrapper[4757]: E1006 13:58:00.935223 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="ceilometer-central-agent" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.935230 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="ceilometer-central-agent" Oct 06 13:58:00 crc kubenswrapper[4757]: E1006 13:58:00.935243 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="sg-core" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.935248 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="sg-core" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.935421 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="ceilometer-notification-agent" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.935432 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="proxy-httpd" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.935453 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="sg-core" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.935462 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" containerName="ceilometer-central-agent" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.936204 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-d1d1-account-create-pfx4p" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.941925 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "35e902eb-73d7-4f08-b184-3b4804d7b483" (UID: "35e902eb-73d7-4f08-b184-3b4804d7b483"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.942640 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 06 13:58:00 crc kubenswrapper[4757]: I1006 13:58:00.961752 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-d1d1-account-create-pfx4p"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.004645 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcrc7\" (UniqueName: \"kubernetes.io/projected/2d124778-aecd-4366-9a84-ab8c2cb478b8-kube-api-access-jcrc7\") pod \"nova-api-d1d1-account-create-pfx4p\" (UID: \"2d124778-aecd-4366-9a84-ab8c2cb478b8\") " pod="openstack/nova-api-d1d1-account-create-pfx4p" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.004732 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.004746 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mstz4\" (UniqueName: \"kubernetes.io/projected/35e902eb-73d7-4f08-b184-3b4804d7b483-kube-api-access-mstz4\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.004754 4757 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.004766 4757 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.004774 4757 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/35e902eb-73d7-4f08-b184-3b4804d7b483-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.009888 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "35e902eb-73d7-4f08-b184-3b4804d7b483" (UID: "35e902eb-73d7-4f08-b184-3b4804d7b483"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.026820 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-config-data" (OuterVolumeSpecName: "config-data") pod "35e902eb-73d7-4f08-b184-3b4804d7b483" (UID: "35e902eb-73d7-4f08-b184-3b4804d7b483"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.106245 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcrc7\" (UniqueName: \"kubernetes.io/projected/2d124778-aecd-4366-9a84-ab8c2cb478b8-kube-api-access-jcrc7\") pod \"nova-api-d1d1-account-create-pfx4p\" (UID: \"2d124778-aecd-4366-9a84-ab8c2cb478b8\") " pod="openstack/nova-api-d1d1-account-create-pfx4p" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.106398 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.106415 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/35e902eb-73d7-4f08-b184-3b4804d7b483-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.134330 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-0cee-account-create-hlzct"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.135753 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0cee-account-create-hlzct" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.140003 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.146378 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcrc7\" (UniqueName: \"kubernetes.io/projected/2d124778-aecd-4366-9a84-ab8c2cb478b8-kube-api-access-jcrc7\") pod \"nova-api-d1d1-account-create-pfx4p\" (UID: \"2d124778-aecd-4366-9a84-ab8c2cb478b8\") " pod="openstack/nova-api-d1d1-account-create-pfx4p" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.147112 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0cee-account-create-hlzct"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.208604 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zz5x\" (UniqueName: \"kubernetes.io/projected/e3515117-5db7-4811-a21b-39aaaf03eda0-kube-api-access-2zz5x\") pod \"nova-cell0-0cee-account-create-hlzct\" (UID: \"e3515117-5db7-4811-a21b-39aaaf03eda0\") " pod="openstack/nova-cell0-0cee-account-create-hlzct" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.259085 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-d1d1-account-create-pfx4p" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.311775 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zz5x\" (UniqueName: \"kubernetes.io/projected/e3515117-5db7-4811-a21b-39aaaf03eda0-kube-api-access-2zz5x\") pod \"nova-cell0-0cee-account-create-hlzct\" (UID: \"e3515117-5db7-4811-a21b-39aaaf03eda0\") " pod="openstack/nova-cell0-0cee-account-create-hlzct" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.358582 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-3d6a-account-create-ktj8k"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.360453 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3d6a-account-create-ktj8k"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.360536 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-3d6a-account-create-ktj8k" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.364475 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.381848 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zz5x\" (UniqueName: \"kubernetes.io/projected/e3515117-5db7-4811-a21b-39aaaf03eda0-kube-api-access-2zz5x\") pod \"nova-cell0-0cee-account-create-hlzct\" (UID: \"e3515117-5db7-4811-a21b-39aaaf03eda0\") " pod="openstack/nova-cell0-0cee-account-create-hlzct" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.414039 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zlb6\" (UniqueName: \"kubernetes.io/projected/d11e5852-b82d-4633-9102-b3153ff76ab8-kube-api-access-4zlb6\") pod \"nova-cell1-3d6a-account-create-ktj8k\" (UID: \"d11e5852-b82d-4633-9102-b3153ff76ab8\") " pod="openstack/nova-cell1-3d6a-account-create-ktj8k" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.506711 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78a4414b-7eec-457f-b08c-aeb719ffc320","Type":"ContainerStarted","Data":"627201afc996e1b5ebc9ec2b49081817f4957580a028ef0ede8ac13918294d2e"} Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.513493 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"35e902eb-73d7-4f08-b184-3b4804d7b483","Type":"ContainerDied","Data":"867d30eef6520c14dbf91a3f8d8fbe5f34232493bdb349ebc91ab068cd4093f2"} Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.513560 4757 scope.go:117] "RemoveContainer" containerID="fbfb33efcd475c91096509bcaa55ef035d54e5862559c0a95a2cdd8a1709b427" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.513784 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.519076 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zlb6\" (UniqueName: \"kubernetes.io/projected/d11e5852-b82d-4633-9102-b3153ff76ab8-kube-api-access-4zlb6\") pod \"nova-cell1-3d6a-account-create-ktj8k\" (UID: \"d11e5852-b82d-4633-9102-b3153ff76ab8\") " pod="openstack/nova-cell1-3d6a-account-create-ktj8k" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.542301 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-0cee-account-create-hlzct" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.556022 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zlb6\" (UniqueName: \"kubernetes.io/projected/d11e5852-b82d-4633-9102-b3153ff76ab8-kube-api-access-4zlb6\") pod \"nova-cell1-3d6a-account-create-ktj8k\" (UID: \"d11e5852-b82d-4633-9102-b3153ff76ab8\") " pod="openstack/nova-cell1-3d6a-account-create-ktj8k" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.566158 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.566130995 podStartE2EDuration="3.566130995s" podCreationTimestamp="2025-10-06 13:57:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:01.532446478 +0000 UTC m=+1170.029765035" watchObservedRunningTime="2025-10-06 13:58:01.566130995 +0000 UTC m=+1170.063449532" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.569369 4757 scope.go:117] "RemoveContainer" containerID="07e510759343c92d732cc4a9dccfa83c6d7e1dd25484f517c514e687ae396c74" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.573758 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.588913 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.607947 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.610298 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.621366 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.634621 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.634895 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.644622 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-d1d1-account-create-pfx4p"] Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.653917 4757 scope.go:117] "RemoveContainer" containerID="928c6b86589c64328bce8ae329baea34e5559cfc23e8ec86853784a5d6880d10" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.720102 4757 scope.go:117] "RemoveContainer" containerID="3227955555c26be0206242a13ec3b1a049b08b322e0b2b0c3fd1e8cc76e0a76a" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.729976 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3d6a-account-create-ktj8k" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.829956 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-scripts\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.830675 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-run-httpd\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.830703 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.830751 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-config-data\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.830773 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9rms\" (UniqueName: \"kubernetes.io/projected/741fdac4-544e-4f02-b425-139415645595-kube-api-access-k9rms\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.830797 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.830825 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-log-httpd\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.932071 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.932137 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-log-httpd\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.932199 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-scripts\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.932261 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-run-httpd\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.932283 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.932345 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-config-data\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.932365 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9rms\" (UniqueName: \"kubernetes.io/projected/741fdac4-544e-4f02-b425-139415645595-kube-api-access-k9rms\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.933657 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-run-httpd\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.933846 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-log-httpd\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.941670 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.941817 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-scripts\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.944318 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.949396 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-config-data\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:01 crc kubenswrapper[4757]: I1006 13:58:01.953586 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9rms\" (UniqueName: \"kubernetes.io/projected/741fdac4-544e-4f02-b425-139415645595-kube-api-access-k9rms\") pod \"ceilometer-0\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") " pod="openstack/ceilometer-0" Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.046801 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.164967 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-0cee-account-create-hlzct"] Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.201213 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35e902eb-73d7-4f08-b184-3b4804d7b483" path="/var/lib/kubelet/pods/35e902eb-73d7-4f08-b184-3b4804d7b483/volumes" Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.226311 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-3d6a-account-create-ktj8k"] Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.237215 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.522795 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3d6a-account-create-ktj8k" event={"ID":"d11e5852-b82d-4633-9102-b3153ff76ab8","Type":"ContainerStarted","Data":"2ce5bbfdb33f92bc2d7fc9048c4b7ff29928b7e522b0a4d950c6dfedcf2c2880"} Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.523043 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3d6a-account-create-ktj8k" event={"ID":"d11e5852-b82d-4633-9102-b3153ff76ab8","Type":"ContainerStarted","Data":"09ad04a92768ae6194b80c4915e2a5adde1396607bccc589b8565d77ebfee2f7"} Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.524311 4757 generic.go:334] "Generic (PLEG): container finished" podID="2d124778-aecd-4366-9a84-ab8c2cb478b8" containerID="3180ad49ec83de44356f248b7955019a2a0fc4b090f172d398ff980f512d38f8" exitCode=0 Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.524351 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-d1d1-account-create-pfx4p" event={"ID":"2d124778-aecd-4366-9a84-ab8c2cb478b8","Type":"ContainerDied","Data":"3180ad49ec83de44356f248b7955019a2a0fc4b090f172d398ff980f512d38f8"} Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.524367 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-d1d1-account-create-pfx4p" event={"ID":"2d124778-aecd-4366-9a84-ab8c2cb478b8","Type":"ContainerStarted","Data":"ca6c4e860faa5fe136ed0e7b1799100caad83c8051521e809629350ead9c8a10"} Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.526835 4757 generic.go:334] "Generic (PLEG): container finished" podID="e3515117-5db7-4811-a21b-39aaaf03eda0" containerID="7be833793fbb9ececc1b726d6d92fe03be0ddc3a27f933c54e05fd8e35b9371a" exitCode=0 Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.526887 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0cee-account-create-hlzct" 
event={"ID":"e3515117-5db7-4811-a21b-39aaaf03eda0","Type":"ContainerDied","Data":"7be833793fbb9ececc1b726d6d92fe03be0ddc3a27f933c54e05fd8e35b9371a"} Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.526905 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0cee-account-create-hlzct" event={"ID":"e3515117-5db7-4811-a21b-39aaaf03eda0","Type":"ContainerStarted","Data":"b7a94760245fdc7e287a5dce2379b372ff63146d72649962e3df7901022f9dcc"} Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.538218 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-3d6a-account-create-ktj8k" podStartSLOduration=1.53820029 podStartE2EDuration="1.53820029s" podCreationTimestamp="2025-10-06 13:58:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:02.535495654 +0000 UTC m=+1171.032814191" watchObservedRunningTime="2025-10-06 13:58:02.53820029 +0000 UTC m=+1171.035518837" Oct 06 13:58:02 crc kubenswrapper[4757]: W1006 13:58:02.696168 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod741fdac4_544e_4f02_b425_139415645595.slice/crio-78051c2b920e742c33687284569ccd48c38a9769dd125ac5596046f4fbed5925 WatchSource:0}: Error finding container 78051c2b920e742c33687284569ccd48c38a9769dd125ac5596046f4fbed5925: Status 404 returned error can't find the container with id 78051c2b920e742c33687284569ccd48c38a9769dd125ac5596046f4fbed5925 Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.701795 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:02 crc kubenswrapper[4757]: I1006 13:58:02.878432 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:03 crc kubenswrapper[4757]: I1006 13:58:03.560349 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerStarted","Data":"78051c2b920e742c33687284569ccd48c38a9769dd125ac5596046f4fbed5925"} Oct 06 13:58:03 crc kubenswrapper[4757]: I1006 13:58:03.562934 4757 generic.go:334] "Generic (PLEG): container finished" podID="d11e5852-b82d-4633-9102-b3153ff76ab8" containerID="2ce5bbfdb33f92bc2d7fc9048c4b7ff29928b7e522b0a4d950c6dfedcf2c2880" exitCode=0 Oct 06 13:58:03 crc kubenswrapper[4757]: I1006 13:58:03.563463 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3d6a-account-create-ktj8k" event={"ID":"d11e5852-b82d-4633-9102-b3153ff76ab8","Type":"ContainerDied","Data":"2ce5bbfdb33f92bc2d7fc9048c4b7ff29928b7e522b0a4d950c6dfedcf2c2880"} Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:03.997576 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0cee-account-create-hlzct" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.011458 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-d1d1-account-create-pfx4p" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.082674 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcrc7\" (UniqueName: \"kubernetes.io/projected/2d124778-aecd-4366-9a84-ab8c2cb478b8-kube-api-access-jcrc7\") pod \"2d124778-aecd-4366-9a84-ab8c2cb478b8\" (UID: \"2d124778-aecd-4366-9a84-ab8c2cb478b8\") " Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.082746 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zz5x\" (UniqueName: \"kubernetes.io/projected/e3515117-5db7-4811-a21b-39aaaf03eda0-kube-api-access-2zz5x\") pod \"e3515117-5db7-4811-a21b-39aaaf03eda0\" (UID: \"e3515117-5db7-4811-a21b-39aaaf03eda0\") " Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.086821 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3515117-5db7-4811-a21b-39aaaf03eda0-kube-api-access-2zz5x" (OuterVolumeSpecName: "kube-api-access-2zz5x") pod "e3515117-5db7-4811-a21b-39aaaf03eda0" (UID: "e3515117-5db7-4811-a21b-39aaaf03eda0"). InnerVolumeSpecName "kube-api-access-2zz5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.088737 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d124778-aecd-4366-9a84-ab8c2cb478b8-kube-api-access-jcrc7" (OuterVolumeSpecName: "kube-api-access-jcrc7") pod "2d124778-aecd-4366-9a84-ab8c2cb478b8" (UID: "2d124778-aecd-4366-9a84-ab8c2cb478b8"). InnerVolumeSpecName "kube-api-access-jcrc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.186215 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcrc7\" (UniqueName: \"kubernetes.io/projected/2d124778-aecd-4366-9a84-ab8c2cb478b8-kube-api-access-jcrc7\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.186818 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zz5x\" (UniqueName: \"kubernetes.io/projected/e3515117-5db7-4811-a21b-39aaaf03eda0-kube-api-access-2zz5x\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.363264 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.363324 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.363368 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.364049 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8ba4bb2fc370115674e0f99022b0181292af98f8a9ad6252b38df72c7f3b30ad"} 
pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.364119 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://8ba4bb2fc370115674e0f99022b0181292af98f8a9ad6252b38df72c7f3b30ad" gracePeriod=600 Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.586800 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-d1d1-account-create-pfx4p" event={"ID":"2d124778-aecd-4366-9a84-ab8c2cb478b8","Type":"ContainerDied","Data":"ca6c4e860faa5fe136ed0e7b1799100caad83c8051521e809629350ead9c8a10"} Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.587179 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca6c4e860faa5fe136ed0e7b1799100caad83c8051521e809629350ead9c8a10" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.587256 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-d1d1-account-create-pfx4p" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.596431 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="8ba4bb2fc370115674e0f99022b0181292af98f8a9ad6252b38df72c7f3b30ad" exitCode=0 Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.596504 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"8ba4bb2fc370115674e0f99022b0181292af98f8a9ad6252b38df72c7f3b30ad"} Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.596539 4757 scope.go:117] "RemoveContainer" containerID="c10bf450268206bb6caa070d8d9e8b690b70b76277c3af98f50337e231aead63" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.604207 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerStarted","Data":"9e76bbb31462480d0fc4e74ead3fa1c63cc1689401f06ca01c67c2b54682fe36"} Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.604251 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerStarted","Data":"b2b23b21ab7f2366500eeb051010d82935a8374c0b9cf7e03d5810fa064e52aa"} Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.613730 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-0cee-account-create-hlzct" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.614150 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-0cee-account-create-hlzct" event={"ID":"e3515117-5db7-4811-a21b-39aaaf03eda0","Type":"ContainerDied","Data":"b7a94760245fdc7e287a5dce2379b372ff63146d72649962e3df7901022f9dcc"} Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.614205 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7a94760245fdc7e287a5dce2379b372ff63146d72649962e3df7901022f9dcc" Oct 06 13:58:04 crc kubenswrapper[4757]: I1006 13:58:04.937080 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3d6a-account-create-ktj8k" Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.101273 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zlb6\" (UniqueName: \"kubernetes.io/projected/d11e5852-b82d-4633-9102-b3153ff76ab8-kube-api-access-4zlb6\") pod \"d11e5852-b82d-4633-9102-b3153ff76ab8\" (UID: \"d11e5852-b82d-4633-9102-b3153ff76ab8\") " Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.107363 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d11e5852-b82d-4633-9102-b3153ff76ab8-kube-api-access-4zlb6" (OuterVolumeSpecName: "kube-api-access-4zlb6") pod "d11e5852-b82d-4633-9102-b3153ff76ab8" (UID: "d11e5852-b82d-4633-9102-b3153ff76ab8"). InnerVolumeSpecName "kube-api-access-4zlb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.203403 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zlb6\" (UniqueName: \"kubernetes.io/projected/d11e5852-b82d-4633-9102-b3153ff76ab8-kube-api-access-4zlb6\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.623501 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"c34d9f422d1bc09e7e1520320e832d4b94b397917c882ecb52d4c57559a7b9dc"} Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.625487 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerStarted","Data":"c2a59d82a3d9ac021f10f166500c167d4c296b8b633bcf297a68adacab716bd2"} Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.627109 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-3d6a-account-create-ktj8k" event={"ID":"d11e5852-b82d-4633-9102-b3153ff76ab8","Type":"ContainerDied","Data":"09ad04a92768ae6194b80c4915e2a5adde1396607bccc589b8565d77ebfee2f7"} Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.627148 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09ad04a92768ae6194b80c4915e2a5adde1396607bccc589b8565d77ebfee2f7" Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.627173 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-3d6a-account-create-ktj8k" Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.935042 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.935112 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 06 13:58:05 crc kubenswrapper[4757]: I1006 13:58:05.980107 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.012546 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.012939 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.403748 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ststm"] Oct 06 13:58:06 crc kubenswrapper[4757]: E1006 13:58:06.404583 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d124778-aecd-4366-9a84-ab8c2cb478b8" containerName="mariadb-account-create" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.404599 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d124778-aecd-4366-9a84-ab8c2cb478b8" containerName="mariadb-account-create" Oct 06 13:58:06 crc kubenswrapper[4757]: E1006 13:58:06.404620 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d11e5852-b82d-4633-9102-b3153ff76ab8" containerName="mariadb-account-create" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.404626 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d11e5852-b82d-4633-9102-b3153ff76ab8" containerName="mariadb-account-create" Oct 06 13:58:06 crc kubenswrapper[4757]: E1006 13:58:06.404636 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3515117-5db7-4811-a21b-39aaaf03eda0" containerName="mariadb-account-create" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.404643 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3515117-5db7-4811-a21b-39aaaf03eda0" containerName="mariadb-account-create" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.404817 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3515117-5db7-4811-a21b-39aaaf03eda0" containerName="mariadb-account-create" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.404833 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="d11e5852-b82d-4633-9102-b3153ff76ab8" containerName="mariadb-account-create" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.404845 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d124778-aecd-4366-9a84-ab8c2cb478b8" containerName="mariadb-account-create" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.405439 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.408158 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-hwxdw" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.408989 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.415768 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ststm"] Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.428124 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.523999 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-config-data\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.524234 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k5nq\" (UniqueName: \"kubernetes.io/projected/86a61f37-c383-4faf-b0b1-0abfd8866683-kube-api-access-8k5nq\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.524268 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.524307 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-scripts\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.629200 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-config-data\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.629361 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k5nq\" (UniqueName: \"kubernetes.io/projected/86a61f37-c383-4faf-b0b1-0abfd8866683-kube-api-access-8k5nq\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.629398 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-ststm\" 
(UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.629441 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-scripts\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.639276 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-config-data\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.647774 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-scripts\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.651461 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.652657 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k5nq\" (UniqueName: \"kubernetes.io/projected/86a61f37-c383-4faf-b0b1-0abfd8866683-kube-api-access-8k5nq\") pod \"nova-cell0-conductor-db-sync-ststm\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.663246 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.663325 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 06 13:58:06 crc kubenswrapper[4757]: I1006 13:58:06.729489 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:07 crc kubenswrapper[4757]: I1006 13:58:07.324749 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ststm"] Oct 06 13:58:07 crc kubenswrapper[4757]: I1006 13:58:07.672280 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-ststm" event={"ID":"86a61f37-c383-4faf-b0b1-0abfd8866683","Type":"ContainerStarted","Data":"52fdb32606935a1e33a1c091d47d3b50d588e7bd63d62b8bf30511463c69ee28"} Oct 06 13:58:07 crc kubenswrapper[4757]: I1006 13:58:07.674638 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerStarted","Data":"fc4266e8007671289e1f06fdf908596f4daf6a6746553106d601fa0f4d406967"} Oct 06 13:58:07 crc kubenswrapper[4757]: I1006 13:58:07.674871 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="ceilometer-central-agent" containerID="cri-o://b2b23b21ab7f2366500eeb051010d82935a8374c0b9cf7e03d5810fa064e52aa" gracePeriod=30 Oct 06 13:58:07 crc kubenswrapper[4757]: I1006 13:58:07.674896 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="ceilometer-notification-agent" containerID="cri-o://9e76bbb31462480d0fc4e74ead3fa1c63cc1689401f06ca01c67c2b54682fe36" gracePeriod=30 Oct 06 13:58:07 crc kubenswrapper[4757]: I1006 13:58:07.674892 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="proxy-httpd" containerID="cri-o://fc4266e8007671289e1f06fdf908596f4daf6a6746553106d601fa0f4d406967" gracePeriod=30 Oct 06 13:58:07 crc kubenswrapper[4757]: I1006 13:58:07.675185 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="sg-core" containerID="cri-o://c2a59d82a3d9ac021f10f166500c167d4c296b8b633bcf297a68adacab716bd2" gracePeriod=30 Oct 06 13:58:07 crc kubenswrapper[4757]: I1006 13:58:07.705696 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.5865496759999997 podStartE2EDuration="6.705666798s" podCreationTimestamp="2025-10-06 13:58:01 +0000 UTC" firstStartedPulling="2025-10-06 13:58:02.69824548 +0000 UTC m=+1171.195564017" lastFinishedPulling="2025-10-06 13:58:06.817362602 +0000 UTC m=+1175.314681139" observedRunningTime="2025-10-06 13:58:07.694732688 +0000 UTC m=+1176.192051235" watchObservedRunningTime="2025-10-06 13:58:07.705666798 +0000 UTC m=+1176.202985335" Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.687702 4757 generic.go:334] "Generic (PLEG): container finished" podID="741fdac4-544e-4f02-b425-139415645595" containerID="fc4266e8007671289e1f06fdf908596f4daf6a6746553106d601fa0f4d406967" exitCode=0 Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.687732 4757 generic.go:334] "Generic (PLEG): container finished" podID="741fdac4-544e-4f02-b425-139415645595" containerID="c2a59d82a3d9ac021f10f166500c167d4c296b8b633bcf297a68adacab716bd2" exitCode=2 Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.687741 4757 generic.go:334] "Generic (PLEG): container finished" 
podID="741fdac4-544e-4f02-b425-139415645595" containerID="9e76bbb31462480d0fc4e74ead3fa1c63cc1689401f06ca01c67c2b54682fe36" exitCode=0 Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.687783 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerDied","Data":"fc4266e8007671289e1f06fdf908596f4daf6a6746553106d601fa0f4d406967"} Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.687838 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerDied","Data":"c2a59d82a3d9ac021f10f166500c167d4c296b8b633bcf297a68adacab716bd2"} Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.687849 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerDied","Data":"9e76bbb31462480d0fc4e74ead3fa1c63cc1689401f06ca01c67c2b54682fe36"} Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.687807 4757 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.687872 4757 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.906089 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.917654 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.948421 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 06 13:58:08 crc kubenswrapper[4757]: I1006 13:58:08.949927 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 06 13:58:09 crc kubenswrapper[4757]: I1006 13:58:09.003375 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 06 13:58:09 crc kubenswrapper[4757]: I1006 13:58:09.018743 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 06 13:58:09 crc kubenswrapper[4757]: I1006 13:58:09.701666 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 06 13:58:09 crc kubenswrapper[4757]: I1006 13:58:09.701963 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 06 13:58:10 crc kubenswrapper[4757]: I1006 13:58:10.717669 4757 generic.go:334] "Generic (PLEG): container finished" podID="741fdac4-544e-4f02-b425-139415645595" containerID="b2b23b21ab7f2366500eeb051010d82935a8374c0b9cf7e03d5810fa064e52aa" exitCode=0 Oct 06 13:58:10 crc kubenswrapper[4757]: I1006 13:58:10.718657 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerDied","Data":"b2b23b21ab7f2366500eeb051010d82935a8374c0b9cf7e03d5810fa064e52aa"} Oct 06 13:58:11 crc kubenswrapper[4757]: I1006 13:58:11.624232 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 06 13:58:11 crc kubenswrapper[4757]: I1006 
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.444358 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.569216 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-run-httpd\") pod \"741fdac4-544e-4f02-b425-139415645595\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") "
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.569318 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-config-data\") pod \"741fdac4-544e-4f02-b425-139415645595\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") "
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.569352 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9rms\" (UniqueName: \"kubernetes.io/projected/741fdac4-544e-4f02-b425-139415645595-kube-api-access-k9rms\") pod \"741fdac4-544e-4f02-b425-139415645595\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") "
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.569384 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-log-httpd\") pod \"741fdac4-544e-4f02-b425-139415645595\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") "
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.569424 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-combined-ca-bundle\") pod \"741fdac4-544e-4f02-b425-139415645595\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") "
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.569447 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-scripts\") pod \"741fdac4-544e-4f02-b425-139415645595\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") "
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.569512 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-sg-core-conf-yaml\") pod \"741fdac4-544e-4f02-b425-139415645595\" (UID: \"741fdac4-544e-4f02-b425-139415645595\") "
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.569778 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "741fdac4-544e-4f02-b425-139415645595" (UID: "741fdac4-544e-4f02-b425-139415645595"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.570174 4757 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.570439 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "741fdac4-544e-4f02-b425-139415645595" (UID: "741fdac4-544e-4f02-b425-139415645595"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.577037 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/741fdac4-544e-4f02-b425-139415645595-kube-api-access-k9rms" (OuterVolumeSpecName: "kube-api-access-k9rms") pod "741fdac4-544e-4f02-b425-139415645595" (UID: "741fdac4-544e-4f02-b425-139415645595"). InnerVolumeSpecName "kube-api-access-k9rms". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.577714 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-scripts" (OuterVolumeSpecName: "scripts") pod "741fdac4-544e-4f02-b425-139415645595" (UID: "741fdac4-544e-4f02-b425-139415645595"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.631402 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "741fdac4-544e-4f02-b425-139415645595" (UID: "741fdac4-544e-4f02-b425-139415645595"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.672477 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9rms\" (UniqueName: \"kubernetes.io/projected/741fdac4-544e-4f02-b425-139415645595-kube-api-access-k9rms\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.672519 4757 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/741fdac4-544e-4f02-b425-139415645595-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.672534 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.672546 4757 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.680743 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "741fdac4-544e-4f02-b425-139415645595" (UID: "741fdac4-544e-4f02-b425-139415645595"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.706550 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-config-data" (OuterVolumeSpecName: "config-data") pod "741fdac4-544e-4f02-b425-139415645595" (UID: "741fdac4-544e-4f02-b425-139415645595"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.760912 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"741fdac4-544e-4f02-b425-139415645595","Type":"ContainerDied","Data":"78051c2b920e742c33687284569ccd48c38a9769dd125ac5596046f4fbed5925"}
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.761230 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.761238 4757 scope.go:117] "RemoveContainer" containerID="fc4266e8007671289e1f06fdf908596f4daf6a6746553106d601fa0f4d406967"
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.774218 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-config-data\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.774248 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741fdac4-544e-4f02-b425-139415645595-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.798440 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.805882 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.830057 4757 scope.go:117] "RemoveContainer" containerID="c2a59d82a3d9ac021f10f166500c167d4c296b8b633bcf297a68adacab716bd2"
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.831836 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:58:14 crc kubenswrapper[4757]: E1006 13:58:14.832295 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="sg-core"
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.832309 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="sg-core"
Oct 06 13:58:14 crc kubenswrapper[4757]: E1006 13:58:14.832322 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="ceilometer-central-agent"
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.832330 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="ceilometer-central-agent"
Oct 06 13:58:14 crc kubenswrapper[4757]: E1006 13:58:14.832382 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="proxy-httpd"
Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.832392 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="proxy-httpd"
Oct 06 13:58:14 crc kubenswrapper[4757]: E1006 13:58:14.832404 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="ceilometer-notification-agent"
"RemoveStaleState: removing container" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="ceilometer-notification-agent" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.832410 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="ceilometer-notification-agent" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.832612 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="ceilometer-central-agent" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.832627 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="ceilometer-notification-agent" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.832649 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="proxy-httpd" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.832665 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="741fdac4-544e-4f02-b425-139415645595" containerName="sg-core" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.834233 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.837348 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.837589 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.859016 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.884951 4757 scope.go:117] "RemoveContainer" containerID="9e76bbb31462480d0fc4e74ead3fa1c63cc1689401f06ca01c67c2b54682fe36" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.902779 4757 scope.go:117] "RemoveContainer" containerID="b2b23b21ab7f2366500eeb051010d82935a8374c0b9cf7e03d5810fa064e52aa" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.977285 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.977318 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-log-httpd\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.977351 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-config-data\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.977372 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxd7z\" (UniqueName: 
\"kubernetes.io/projected/cf4e659c-6f03-41e2-b982-6079035bd42a-kube-api-access-fxd7z\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.977400 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-run-httpd\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.977557 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:14 crc kubenswrapper[4757]: I1006 13:58:14.977588 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-scripts\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.078806 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.078872 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-scripts\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.078962 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.078985 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-log-httpd\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.079009 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-config-data\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.079029 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxd7z\" (UniqueName: \"kubernetes.io/projected/cf4e659c-6f03-41e2-b982-6079035bd42a-kube-api-access-fxd7z\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.079064 4757 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-run-httpd\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.079618 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-run-httpd\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.079853 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-log-httpd\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.083328 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-scripts\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.083786 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.083819 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.086046 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-config-data\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.100693 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxd7z\" (UniqueName: \"kubernetes.io/projected/cf4e659c-6f03-41e2-b982-6079035bd42a-kube-api-access-fxd7z\") pod \"ceilometer-0\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.163330 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.618471 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:15 crc kubenswrapper[4757]: W1006 13:58:15.624823 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf4e659c_6f03_41e2_b982_6079035bd42a.slice/crio-415a057a0ef033119708f31292b62ea8db40222c52d2aaae7a61ab581beb2bb4 WatchSource:0}: Error finding container 415a057a0ef033119708f31292b62ea8db40222c52d2aaae7a61ab581beb2bb4: Status 404 returned error can't find the container with id 415a057a0ef033119708f31292b62ea8db40222c52d2aaae7a61ab581beb2bb4 Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.774285 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerStarted","Data":"415a057a0ef033119708f31292b62ea8db40222c52d2aaae7a61ab581beb2bb4"} Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.779135 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-ststm" event={"ID":"86a61f37-c383-4faf-b0b1-0abfd8866683","Type":"ContainerStarted","Data":"4b13bf86c84437f44865715abaf06cb2add1a3ab9683cc1eea5272ace7b108bd"} Oct 06 13:58:15 crc kubenswrapper[4757]: I1006 13:58:15.803788 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-ststm" podStartSLOduration=2.231693365 podStartE2EDuration="9.803762551s" podCreationTimestamp="2025-10-06 13:58:06 +0000 UTC" firstStartedPulling="2025-10-06 13:58:07.334413702 +0000 UTC m=+1175.831732239" lastFinishedPulling="2025-10-06 13:58:14.906482888 +0000 UTC m=+1183.403801425" observedRunningTime="2025-10-06 13:58:15.803380449 +0000 UTC m=+1184.300699006" watchObservedRunningTime="2025-10-06 13:58:15.803762551 +0000 UTC m=+1184.301081128" Oct 06 13:58:16 crc kubenswrapper[4757]: I1006 13:58:16.206981 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="741fdac4-544e-4f02-b425-139415645595" path="/var/lib/kubelet/pods/741fdac4-544e-4f02-b425-139415645595/volumes" Oct 06 13:58:16 crc kubenswrapper[4757]: I1006 13:58:16.788049 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerStarted","Data":"7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a"} Oct 06 13:58:16 crc kubenswrapper[4757]: I1006 13:58:16.870049 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:17 crc kubenswrapper[4757]: I1006 13:58:17.800975 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerStarted","Data":"73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743"} Oct 06 13:58:17 crc kubenswrapper[4757]: I1006 13:58:17.801387 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerStarted","Data":"fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd"} Oct 06 13:58:19 crc kubenswrapper[4757]: I1006 13:58:19.828330 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerStarted","Data":"fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936"} Oct 06 13:58:19 crc kubenswrapper[4757]: I1006 13:58:19.829043 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 06 13:58:19 crc kubenswrapper[4757]: I1006 13:58:19.828610 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="proxy-httpd" containerID="cri-o://fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936" gracePeriod=30 Oct 06 13:58:19 crc kubenswrapper[4757]: I1006 13:58:19.828546 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="ceilometer-central-agent" containerID="cri-o://7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a" gracePeriod=30 Oct 06 13:58:19 crc kubenswrapper[4757]: I1006 13:58:19.828704 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="ceilometer-notification-agent" containerID="cri-o://fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd" gracePeriod=30 Oct 06 13:58:19 crc kubenswrapper[4757]: I1006 13:58:19.828719 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="sg-core" containerID="cri-o://73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743" gracePeriod=30 Oct 06 13:58:19 crc kubenswrapper[4757]: I1006 13:58:19.853249 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.1948515 podStartE2EDuration="5.853234165s" podCreationTimestamp="2025-10-06 13:58:14 +0000 UTC" firstStartedPulling="2025-10-06 13:58:15.628231145 +0000 UTC m=+1184.125549682" lastFinishedPulling="2025-10-06 13:58:19.2866138 +0000 UTC m=+1187.783932347" observedRunningTime="2025-10-06 13:58:19.848576907 +0000 UTC m=+1188.345895464" watchObservedRunningTime="2025-10-06 13:58:19.853234165 +0000 UTC m=+1188.350552692" Oct 06 13:58:20 crc kubenswrapper[4757]: I1006 13:58:20.840083 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerID="fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936" exitCode=0 Oct 06 13:58:20 crc kubenswrapper[4757]: I1006 13:58:20.840433 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerID="73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743" exitCode=2 Oct 06 13:58:20 crc kubenswrapper[4757]: I1006 13:58:20.840142 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerDied","Data":"fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936"} Oct 06 13:58:20 crc kubenswrapper[4757]: I1006 13:58:20.840474 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerDied","Data":"73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743"} Oct 06 13:58:20 crc kubenswrapper[4757]: I1006 13:58:20.840489 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerDied","Data":"fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd"} Oct 06 13:58:20 crc kubenswrapper[4757]: I1006 13:58:20.840446 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerID="fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd" exitCode=0 Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.549205 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.581577 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-scripts\") pod \"cf4e659c-6f03-41e2-b982-6079035bd42a\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.581762 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-sg-core-conf-yaml\") pod \"cf4e659c-6f03-41e2-b982-6079035bd42a\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.581853 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-log-httpd\") pod \"cf4e659c-6f03-41e2-b982-6079035bd42a\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.581879 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxd7z\" (UniqueName: \"kubernetes.io/projected/cf4e659c-6f03-41e2-b982-6079035bd42a-kube-api-access-fxd7z\") pod \"cf4e659c-6f03-41e2-b982-6079035bd42a\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.581928 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-run-httpd\") pod \"cf4e659c-6f03-41e2-b982-6079035bd42a\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.582021 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-combined-ca-bundle\") pod \"cf4e659c-6f03-41e2-b982-6079035bd42a\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.582068 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-config-data\") pod \"cf4e659c-6f03-41e2-b982-6079035bd42a\" (UID: \"cf4e659c-6f03-41e2-b982-6079035bd42a\") " Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.597819 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cf4e659c-6f03-41e2-b982-6079035bd42a" (UID: "cf4e659c-6f03-41e2-b982-6079035bd42a"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.622543 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cf4e659c-6f03-41e2-b982-6079035bd42a" (UID: "cf4e659c-6f03-41e2-b982-6079035bd42a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.626347 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-scripts" (OuterVolumeSpecName: "scripts") pod "cf4e659c-6f03-41e2-b982-6079035bd42a" (UID: "cf4e659c-6f03-41e2-b982-6079035bd42a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.626504 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf4e659c-6f03-41e2-b982-6079035bd42a-kube-api-access-fxd7z" (OuterVolumeSpecName: "kube-api-access-fxd7z") pod "cf4e659c-6f03-41e2-b982-6079035bd42a" (UID: "cf4e659c-6f03-41e2-b982-6079035bd42a"). InnerVolumeSpecName "kube-api-access-fxd7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.629528 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cf4e659c-6f03-41e2-b982-6079035bd42a" (UID: "cf4e659c-6f03-41e2-b982-6079035bd42a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.684496 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.684534 4757 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.684548 4757 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.684561 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxd7z\" (UniqueName: \"kubernetes.io/projected/cf4e659c-6f03-41e2-b982-6079035bd42a-kube-api-access-fxd7z\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.684572 4757 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf4e659c-6f03-41e2-b982-6079035bd42a-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.711346 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf4e659c-6f03-41e2-b982-6079035bd42a" (UID: "cf4e659c-6f03-41e2-b982-6079035bd42a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.712738 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-config-data" (OuterVolumeSpecName: "config-data") pod "cf4e659c-6f03-41e2-b982-6079035bd42a" (UID: "cf4e659c-6f03-41e2-b982-6079035bd42a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.786283 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.786332 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf4e659c-6f03-41e2-b982-6079035bd42a-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.894324 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerID="7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a" exitCode=0 Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.894369 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerDied","Data":"7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a"} Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.894395 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf4e659c-6f03-41e2-b982-6079035bd42a","Type":"ContainerDied","Data":"415a057a0ef033119708f31292b62ea8db40222c52d2aaae7a61ab581beb2bb4"} Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.894412 4757 scope.go:117] "RemoveContainer" containerID="fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.894597 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.923245 4757 scope.go:117] "RemoveContainer" containerID="73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.946056 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.957408 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.966253 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:24 crc kubenswrapper[4757]: E1006 13:58:24.966692 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="ceilometer-notification-agent" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.966711 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="ceilometer-notification-agent" Oct 06 13:58:24 crc kubenswrapper[4757]: E1006 13:58:24.966727 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="ceilometer-central-agent" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.966734 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="ceilometer-central-agent" Oct 06 13:58:24 crc kubenswrapper[4757]: E1006 13:58:24.966760 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="sg-core" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.966768 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="sg-core" Oct 06 13:58:24 crc kubenswrapper[4757]: E1006 13:58:24.966784 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="proxy-httpd" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.966791 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="proxy-httpd" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.966961 4757 scope.go:117] "RemoveContainer" containerID="fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.966990 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="proxy-httpd" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.967207 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="sg-core" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.967270 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="ceilometer-central-agent" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.967299 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" containerName="ceilometer-notification-agent" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.970475 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.973984 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.974946 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.988708 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-run-httpd\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.988885 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-scripts\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.988927 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.989000 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-config-data\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.989134 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rvqj\" (UniqueName: \"kubernetes.io/projected/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-kube-api-access-4rvqj\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.989188 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.989225 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-log-httpd\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:24 crc kubenswrapper[4757]: I1006 13:58:24.993404 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.004849 4757 scope.go:117] "RemoveContainer" containerID="7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.038454 4757 scope.go:117] "RemoveContainer" containerID="fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936" Oct 06 13:58:25 crc kubenswrapper[4757]: E1006 
13:58:25.038899 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936\": container with ID starting with fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936 not found: ID does not exist" containerID="fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.038935 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936"} err="failed to get container status \"fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936\": rpc error: code = NotFound desc = could not find container \"fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936\": container with ID starting with fcb11eea12910f9e3e80285639401c65e1560951c47ae5887a3036488ccdd936 not found: ID does not exist" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.038960 4757 scope.go:117] "RemoveContainer" containerID="73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743" Oct 06 13:58:25 crc kubenswrapper[4757]: E1006 13:58:25.039248 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743\": container with ID starting with 73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743 not found: ID does not exist" containerID="73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.039278 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743"} err="failed to get container status \"73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743\": rpc error: code = NotFound desc = could not find container \"73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743\": container with ID starting with 73606ee2751c433602011494573328d8a9364b455759ca341bbda4a8f5e55743 not found: ID does not exist" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.039298 4757 scope.go:117] "RemoveContainer" containerID="fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd" Oct 06 13:58:25 crc kubenswrapper[4757]: E1006 13:58:25.039515 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd\": container with ID starting with fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd not found: ID does not exist" containerID="fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.039547 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd"} err="failed to get container status \"fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd\": rpc error: code = NotFound desc = could not find container \"fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd\": container with ID starting with fb8312dd562b87a058b6fa37a9f20381d17877bbd057a0dc7295073b55d4ebfd not found: ID does not exist" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.039568 4757 
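The log.go:32 / pod_container_deletor.go:53 pairs above show RemoveContainer racing with containers that are already gone: the CRI status lookup fails with gRPC NotFound, and the deletor logs the error and moves on, treating removal as idempotent. A sketch of that tolerance, where getStatus is a stand-in for the CRI ContainerStatus call rather than kubelet's real client:

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer ignores a NotFound status lookup, since the
    // container being gone is already the desired outcome.
    func removeContainer(id string, getStatus func(string) error) {
        if err := getStatus(id); err != nil {
            if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
                fmt.Printf("DeleteContainer returned error (ignored): %v\n", err)
                return // already removed; nothing left to do
            }
            fmt.Printf("unexpected error: %v\n", err)
            return
        }
        fmt.Printf("container %s still present; would remove it now\n", id)
    }

    func main() {
        notFound := func(id string) error {
            return status.Errorf(codes.NotFound, "could not find container %q", id)
        }
        removeContainer("fcb11eea1291", notFound)
    }
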
scope.go:117] "RemoveContainer" containerID="7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a" Oct 06 13:58:25 crc kubenswrapper[4757]: E1006 13:58:25.039921 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a\": container with ID starting with 7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a not found: ID does not exist" containerID="7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.039973 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a"} err="failed to get container status \"7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a\": rpc error: code = NotFound desc = could not find container \"7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a\": container with ID starting with 7d149439c587d44b495c59d7b19ed16c0fe9ee268a5a213e8daa889470e05f3a not found: ID does not exist" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.091055 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.091150 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-config-data\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.091228 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rvqj\" (UniqueName: \"kubernetes.io/projected/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-kube-api-access-4rvqj\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.091260 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.091283 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-log-httpd\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.091304 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-run-httpd\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.091396 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-scripts\") pod 
\"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.096594 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-log-httpd\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.096946 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-run-httpd\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.097277 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.097411 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-scripts\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.099807 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.100013 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-config-data\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.111609 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rvqj\" (UniqueName: \"kubernetes.io/projected/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-kube-api-access-4rvqj\") pod \"ceilometer-0\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") " pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.306348 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.828044 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.904652 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerStarted","Data":"6f958aff6164c793b5c614726be15603a6af34e40faf68d041cd55f645e95bdd"} Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.907918 4757 generic.go:334] "Generic (PLEG): container finished" podID="86a61f37-c383-4faf-b0b1-0abfd8866683" containerID="4b13bf86c84437f44865715abaf06cb2add1a3ab9683cc1eea5272ace7b108bd" exitCode=0 Oct 06 13:58:25 crc kubenswrapper[4757]: I1006 13:58:25.907953 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-ststm" event={"ID":"86a61f37-c383-4faf-b0b1-0abfd8866683","Type":"ContainerDied","Data":"4b13bf86c84437f44865715abaf06cb2add1a3ab9683cc1eea5272ace7b108bd"} Oct 06 13:58:26 crc kubenswrapper[4757]: I1006 13:58:26.202707 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf4e659c-6f03-41e2-b982-6079035bd42a" path="/var/lib/kubelet/pods/cf4e659c-6f03-41e2-b982-6079035bd42a/volumes" Oct 06 13:58:26 crc kubenswrapper[4757]: I1006 13:58:26.918762 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerStarted","Data":"ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220"} Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.199330 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.335921 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-combined-ca-bundle\") pod \"86a61f37-c383-4faf-b0b1-0abfd8866683\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.336007 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-config-data\") pod \"86a61f37-c383-4faf-b0b1-0abfd8866683\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.336109 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k5nq\" (UniqueName: \"kubernetes.io/projected/86a61f37-c383-4faf-b0b1-0abfd8866683-kube-api-access-8k5nq\") pod \"86a61f37-c383-4faf-b0b1-0abfd8866683\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.336210 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-scripts\") pod \"86a61f37-c383-4faf-b0b1-0abfd8866683\" (UID: \"86a61f37-c383-4faf-b0b1-0abfd8866683\") " Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.341637 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86a61f37-c383-4faf-b0b1-0abfd8866683-kube-api-access-8k5nq" (OuterVolumeSpecName: "kube-api-access-8k5nq") pod "86a61f37-c383-4faf-b0b1-0abfd8866683" (UID: 
"86a61f37-c383-4faf-b0b1-0abfd8866683"). InnerVolumeSpecName "kube-api-access-8k5nq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.345702 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-scripts" (OuterVolumeSpecName: "scripts") pod "86a61f37-c383-4faf-b0b1-0abfd8866683" (UID: "86a61f37-c383-4faf-b0b1-0abfd8866683"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.365074 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86a61f37-c383-4faf-b0b1-0abfd8866683" (UID: "86a61f37-c383-4faf-b0b1-0abfd8866683"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.370168 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-config-data" (OuterVolumeSpecName: "config-data") pod "86a61f37-c383-4faf-b0b1-0abfd8866683" (UID: "86a61f37-c383-4faf-b0b1-0abfd8866683"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.438022 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.438050 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.438060 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k5nq\" (UniqueName: \"kubernetes.io/projected/86a61f37-c383-4faf-b0b1-0abfd8866683-kube-api-access-8k5nq\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.438071 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86a61f37-c383-4faf-b0b1-0abfd8866683-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.927448 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerStarted","Data":"b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa"} Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.927837 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerStarted","Data":"658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727"} Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.930835 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-ststm" event={"ID":"86a61f37-c383-4faf-b0b1-0abfd8866683","Type":"ContainerDied","Data":"52fdb32606935a1e33a1c091d47d3b50d588e7bd63d62b8bf30511463c69ee28"} Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.930869 4757 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="52fdb32606935a1e33a1c091d47d3b50d588e7bd63d62b8bf30511463c69ee28" Oct 06 13:58:27 crc kubenswrapper[4757]: I1006 13:58:27.930902 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-ststm" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.048758 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 06 13:58:28 crc kubenswrapper[4757]: E1006 13:58:28.049324 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86a61f37-c383-4faf-b0b1-0abfd8866683" containerName="nova-cell0-conductor-db-sync" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.049347 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="86a61f37-c383-4faf-b0b1-0abfd8866683" containerName="nova-cell0-conductor-db-sync" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.049608 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="86a61f37-c383-4faf-b0b1-0abfd8866683" containerName="nova-cell0-conductor-db-sync" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.050420 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.057986 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-hwxdw" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.058414 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.059225 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.059337 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpssb\" (UniqueName: \"kubernetes.io/projected/924963ee-1194-4d98-84d7-9bb3e426f7bc-kube-api-access-cpssb\") pod \"nova-cell0-conductor-0\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.059374 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.067898 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.161301 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpssb\" (UniqueName: \"kubernetes.io/projected/924963ee-1194-4d98-84d7-9bb3e426f7bc-kube-api-access-cpssb\") pod \"nova-cell0-conductor-0\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.161783 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.162158 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.166306 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.166782 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.176599 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpssb\" (UniqueName: \"kubernetes.io/projected/924963ee-1194-4d98-84d7-9bb3e426f7bc-kube-api-access-cpssb\") pod \"nova-cell0-conductor-0\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.373418 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.835410 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 06 13:58:28 crc kubenswrapper[4757]: I1006 13:58:28.956544 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"924963ee-1194-4d98-84d7-9bb3e426f7bc","Type":"ContainerStarted","Data":"82b0009e68461935aa036599f7fa8aebc992b0d116fd84f806bb8dcdbd1ced96"} Oct 06 13:58:29 crc kubenswrapper[4757]: I1006 13:58:29.970786 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"924963ee-1194-4d98-84d7-9bb3e426f7bc","Type":"ContainerStarted","Data":"9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b"} Oct 06 13:58:29 crc kubenswrapper[4757]: I1006 13:58:29.971169 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:29 crc kubenswrapper[4757]: I1006 13:58:29.975952 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerStarted","Data":"ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c"} Oct 06 13:58:29 crc kubenswrapper[4757]: I1006 13:58:29.976220 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 06 13:58:30 crc kubenswrapper[4757]: I1006 13:58:30.001923 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.001895332 podStartE2EDuration="2.001895332s" podCreationTimestamp="2025-10-06 13:58:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:29.990700544 +0000 UTC m=+1198.488019081" watchObservedRunningTime="2025-10-06 13:58:30.001895332 +0000 UTC m=+1198.499213909" Oct 06 13:58:30 crc kubenswrapper[4757]: I1006 13:58:30.023058 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.941554137 podStartE2EDuration="6.023037978s" podCreationTimestamp="2025-10-06 13:58:24 +0000 UTC" firstStartedPulling="2025-10-06 13:58:25.834690491 +0000 UTC m=+1194.332009028" lastFinishedPulling="2025-10-06 13:58:28.916174332 +0000 UTC m=+1197.413492869" observedRunningTime="2025-10-06 13:58:30.021969684 +0000 UTC m=+1198.519288251" watchObservedRunningTime="2025-10-06 13:58:30.023037978 +0000 UTC m=+1198.520356515" Oct 06 13:58:38 crc kubenswrapper[4757]: I1006 13:58:38.424077 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.039687 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-95qtm"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.041326 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.044406 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.044442 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.064792 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-95qtm"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.225674 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.225867 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-scripts\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.225999 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-config-data\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.226146 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk4wr\" (UniqueName: \"kubernetes.io/projected/ccc640a0-578c-4bdd-9c71-789d3c821099-kube-api-access-lk4wr\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.271159 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.272955 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.283684 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.284252 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.301757 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.303224 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.310663 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.327178 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.328327 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk4wr\" (UniqueName: \"kubernetes.io/projected/ccc640a0-578c-4bdd-9c71-789d3c821099-kube-api-access-lk4wr\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.328411 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.328467 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-scripts\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.328541 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-config-data\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.356066 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.360844 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-scripts\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.365746 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.372903 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-config-data\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.374383 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk4wr\" (UniqueName: \"kubernetes.io/projected/ccc640a0-578c-4bdd-9c71-789d3c821099-kube-api-access-lk4wr\") pod \"nova-cell0-cell-mapping-95qtm\" (UID: 
\"ccc640a0-578c-4bdd-9c71-789d3c821099\") " pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.380841 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.383402 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.411451 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.429552 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc57d075-eaca-4298-a76d-903d3fa3800b-logs\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.429599 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4d9101-39da-4876-9509-ecf7563515ee-logs\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.429629 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdmm4\" (UniqueName: \"kubernetes.io/projected/df4d9101-39da-4876-9509-ecf7563515ee-kube-api-access-jdmm4\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.429688 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfnvr\" (UniqueName: \"kubernetes.io/projected/cc57d075-eaca-4298-a76d-903d3fa3800b-kube-api-access-dfnvr\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.429710 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-config-data\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.429724 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.429756 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.429783 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-config-data\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " 
pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.458159 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86bc686f95-5b58c"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.459707 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.471048 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.473515 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.475811 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.514958 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.524137 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bc686f95-5b58c"] Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531609 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tv489\" (UniqueName: \"kubernetes.io/projected/0629d26d-a456-477c-9adf-54885b0ff36f-kube-api-access-tv489\") pod \"nova-scheduler-0\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531693 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfnvr\" (UniqueName: \"kubernetes.io/projected/cc57d075-eaca-4298-a76d-903d3fa3800b-kube-api-access-dfnvr\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531734 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-config-data\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531751 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531782 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531815 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531849 4757 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-config-data\") pod \"nova-scheduler-0\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531871 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-config-data\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531944 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc57d075-eaca-4298-a76d-903d3fa3800b-logs\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.531978 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4d9101-39da-4876-9509-ecf7563515ee-logs\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.532017 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdmm4\" (UniqueName: \"kubernetes.io/projected/df4d9101-39da-4876-9509-ecf7563515ee-kube-api-access-jdmm4\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.536347 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc57d075-eaca-4298-a76d-903d3fa3800b-logs\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.536710 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4d9101-39da-4876-9509-ecf7563515ee-logs\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.538675 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-config-data\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.539783 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-config-data\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.546855 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.548613 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.548943 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfnvr\" (UniqueName: \"kubernetes.io/projected/cc57d075-eaca-4298-a76d-903d3fa3800b-kube-api-access-dfnvr\") pod \"nova-api-0\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.556487 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdmm4\" (UniqueName: \"kubernetes.io/projected/df4d9101-39da-4876-9509-ecf7563515ee-kube-api-access-jdmm4\") pod \"nova-metadata-0\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") " pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.611278 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.628285 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633698 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-config-data\") pod \"nova-scheduler-0\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633756 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633814 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-nb\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633842 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-config\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633860 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l565\" (UniqueName: \"kubernetes.io/projected/47ec18a3-c53e-48e4-aae7-f32b19af25a8-kube-api-access-2l565\") pod \"nova-cell1-novncproxy-0\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633878 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633905 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tv489\" (UniqueName: \"kubernetes.io/projected/0629d26d-a456-477c-9adf-54885b0ff36f-kube-api-access-tv489\") pod \"nova-scheduler-0\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633923 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-sb\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633966 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz4vz\" (UniqueName: \"kubernetes.io/projected/c5749667-46f8-48cc-b934-772352cf8bf8-kube-api-access-fz4vz\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.633986 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-svc\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.634005 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.634021 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-swift-storage-0\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.642731 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.645721 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-config-data\") pod \"nova-scheduler-0\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.653736 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tv489\" (UniqueName: \"kubernetes.io/projected/0629d26d-a456-477c-9adf-54885b0ff36f-kube-api-access-tv489\") pod \"nova-scheduler-0\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " 
pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.663741 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-95qtm" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.737348 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-nb\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.737685 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-config\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.737712 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l565\" (UniqueName: \"kubernetes.io/projected/47ec18a3-c53e-48e4-aae7-f32b19af25a8-kube-api-access-2l565\") pod \"nova-cell1-novncproxy-0\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.737742 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.737778 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-sb\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.737863 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz4vz\" (UniqueName: \"kubernetes.io/projected/c5749667-46f8-48cc-b934-772352cf8bf8-kube-api-access-fz4vz\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.737895 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-svc\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.737917 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-swift-storage-0\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.738005 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.743863 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.746123 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-sb\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.748540 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-config\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.759190 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-svc\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.760345 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-nb\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.763742 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.764308 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-swift-storage-0\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.788892 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.792325 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz4vz\" (UniqueName: \"kubernetes.io/projected/c5749667-46f8-48cc-b934-772352cf8bf8-kube-api-access-fz4vz\") pod \"dnsmasq-dns-86bc686f95-5b58c\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.792710 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l565\" (UniqueName: \"kubernetes.io/projected/47ec18a3-c53e-48e4-aae7-f32b19af25a8-kube-api-access-2l565\") pod \"nova-cell1-novncproxy-0\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.797518 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:58:39 crc kubenswrapper[4757]: I1006 13:58:39.811637 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.129805 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:58:40 crc kubenswrapper[4757]: W1006 13:58:40.142517 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc57d075_eaca_4298_a76d_903d3fa3800b.slice/crio-464ecfbf7913be16b6b3c8c76b0f1f77ecae0a404a825bb5c12d096e1b0d4018 WatchSource:0}: Error finding container 464ecfbf7913be16b6b3c8c76b0f1f77ecae0a404a825bb5c12d096e1b0d4018: Status 404 returned error can't find the container with id 464ecfbf7913be16b6b3c8c76b0f1f77ecae0a404a825bb5c12d096e1b0d4018 Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.142890 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.232133 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nz4nw"] Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.234804 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.240647 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.242695 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.253167 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nz4nw"]
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.268339 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-95qtm"]
Oct 06 13:58:40 crc kubenswrapper[4757]: W1006 13:58:40.268541 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podccc640a0_578c_4bdd_9c71_789d3c821099.slice/crio-71339f7cc73c01642c680255844c4d51205d4af94fc65957f1ba7ad57f6d9753 WatchSource:0}: Error finding container 71339f7cc73c01642c680255844c4d51205d4af94fc65957f1ba7ad57f6d9753: Status 404 returned error can't find the container with id 71339f7cc73c01642c680255844c4d51205d4af94fc65957f1ba7ad57f6d9753
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.353260 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58qvp\" (UniqueName: \"kubernetes.io/projected/48547398-bd84-4b69-b6a8-b3db5e7e32a9-kube-api-access-58qvp\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.353331 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-config-data\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.353416 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.353493 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-scripts\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.394798 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.455448 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.455495 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-scripts\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.455598 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58qvp\" (UniqueName: \"kubernetes.io/projected/48547398-bd84-4b69-b6a8-b3db5e7e32a9-kube-api-access-58qvp\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.455634 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-config-data\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.463229 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-config-data\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.464787 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.465336 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-scripts\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.474063 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58qvp\" (UniqueName: \"kubernetes.io/projected/48547398-bd84-4b69-b6a8-b3db5e7e32a9-kube-api-access-58qvp\") pod \"nova-cell1-conductor-db-sync-nz4nw\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") " pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:40 crc kubenswrapper[4757]: W1006 13:58:40.502152 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0629d26d_a456_477c_9adf_54885b0ff36f.slice/crio-7d8d29650dacf7f04e90e80aeeecb75e87c46a38adfbd1c6a8519ba3210d06ac WatchSource:0}: Error finding container 7d8d29650dacf7f04e90e80aeeecb75e87c46a38adfbd1c6a8519ba3210d06ac: Status 404 returned error can't find the container with id 7d8d29650dacf7f04e90e80aeeecb75e87c46a38adfbd1c6a8519ba3210d06ac
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.510446 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.520811 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86bc686f95-5b58c"]
Oct 06 13:58:40 crc kubenswrapper[4757]: I1006 13:58:40.563638 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.054457 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nz4nw"]
Oct 06 13:58:41 crc kubenswrapper[4757]: W1006 13:58:41.057845 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48547398_bd84_4b69_b6a8_b3db5e7e32a9.slice/crio-dc726c7392b3cc69f9193911609ac418b0c8f39a93ac2b51f70075811d46c050 WatchSource:0}: Error finding container dc726c7392b3cc69f9193911609ac418b0c8f39a93ac2b51f70075811d46c050: Status 404 returned error can't find the container with id dc726c7392b3cc69f9193911609ac418b0c8f39a93ac2b51f70075811d46c050
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.116392 4757 generic.go:334] "Generic (PLEG): container finished" podID="c5749667-46f8-48cc-b934-772352cf8bf8" containerID="6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f" exitCode=0
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.116547 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" event={"ID":"c5749667-46f8-48cc-b934-772352cf8bf8","Type":"ContainerDied","Data":"6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f"}
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.116609 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" event={"ID":"c5749667-46f8-48cc-b934-772352cf8bf8","Type":"ContainerStarted","Data":"ba996403da0d919881e318aa6c8a3713413499e1caa7eaf03f3d9758abdaf492"}
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.122989 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0629d26d-a456-477c-9adf-54885b0ff36f","Type":"ContainerStarted","Data":"7d8d29650dacf7f04e90e80aeeecb75e87c46a38adfbd1c6a8519ba3210d06ac"}
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.125179 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nz4nw" event={"ID":"48547398-bd84-4b69-b6a8-b3db5e7e32a9","Type":"ContainerStarted","Data":"dc726c7392b3cc69f9193911609ac418b0c8f39a93ac2b51f70075811d46c050"}
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.126435 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"47ec18a3-c53e-48e4-aae7-f32b19af25a8","Type":"ContainerStarted","Data":"2aca52d067d905539471d3db63d830734934208b4c08fcdab8bb89b2a73abcc6"}
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.129510 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-95qtm" event={"ID":"ccc640a0-578c-4bdd-9c71-789d3c821099","Type":"ContainerStarted","Data":"85d511bf0638c5d866a7350e35a676d505d59153aaeb1e1c9cb119aa200d5d73"}
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.129678 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-95qtm" event={"ID":"ccc640a0-578c-4bdd-9c71-789d3c821099","Type":"ContainerStarted","Data":"71339f7cc73c01642c680255844c4d51205d4af94fc65957f1ba7ad57f6d9753"}
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.130622 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc57d075-eaca-4298-a76d-903d3fa3800b","Type":"ContainerStarted","Data":"464ecfbf7913be16b6b3c8c76b0f1f77ecae0a404a825bb5c12d096e1b0d4018"}
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.131818 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"df4d9101-39da-4876-9509-ecf7563515ee","Type":"ContainerStarted","Data":"a63cddbe2376c7fb4ff28d81bc55c6f69f1c88536f365515e5e7ac4ae4391092"}
Oct 06 13:58:41 crc kubenswrapper[4757]: I1006 13:58:41.162875 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-95qtm" podStartSLOduration=2.16285475 podStartE2EDuration="2.16285475s" podCreationTimestamp="2025-10-06 13:58:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:41.156185177 +0000 UTC m=+1209.653503724" watchObservedRunningTime="2025-10-06 13:58:41.16285475 +0000 UTC m=+1209.660173287"
Oct 06 13:58:42 crc kubenswrapper[4757]: I1006 13:58:42.150224 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" event={"ID":"c5749667-46f8-48cc-b934-772352cf8bf8","Type":"ContainerStarted","Data":"b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460"}
Oct 06 13:58:42 crc kubenswrapper[4757]: I1006 13:58:42.151174 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86bc686f95-5b58c"
Oct 06 13:58:42 crc kubenswrapper[4757]: I1006 13:58:42.156979 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nz4nw" event={"ID":"48547398-bd84-4b69-b6a8-b3db5e7e32a9","Type":"ContainerStarted","Data":"ead89b6eb7ef2cdb8afe050d91ea530c7f417212cefe4bec8bab853834ee9629"}
Oct 06 13:58:42 crc kubenswrapper[4757]: I1006 13:58:42.169880 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" podStartSLOduration=3.169864863 podStartE2EDuration="3.169864863s" podCreationTimestamp="2025-10-06 13:58:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:42.166258837 +0000 UTC m=+1210.663577374" watchObservedRunningTime="2025-10-06 13:58:42.169864863 +0000 UTC m=+1210.667183400"
Oct 06 13:58:42 crc kubenswrapper[4757]: I1006 13:58:42.195042 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-nz4nw" podStartSLOduration=2.195023018 podStartE2EDuration="2.195023018s" podCreationTimestamp="2025-10-06 13:58:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:42.186153674 +0000 UTC m=+1210.683472251" watchObservedRunningTime="2025-10-06 13:58:42.195023018 +0000 UTC m=+1210.692341555"
Oct 06 13:58:43 crc kubenswrapper[4757]: I1006 13:58:43.244940 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 06 13:58:43 crc kubenswrapper[4757]: I1006 13:58:43.299486 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.188281 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="47ec18a3-c53e-48e4-aae7-f32b19af25a8" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306" gracePeriod=30
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.191470 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0629d26d-a456-477c-9adf-54885b0ff36f","Type":"ContainerStarted","Data":"944183a571c7c2fc6c984f92c3b0afc82536909cb0fa41c943c8062170e065bb"}
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.191509 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"47ec18a3-c53e-48e4-aae7-f32b19af25a8","Type":"ContainerStarted","Data":"0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306"}
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.194625 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc57d075-eaca-4298-a76d-903d3fa3800b","Type":"ContainerStarted","Data":"57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2"}
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.194654 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc57d075-eaca-4298-a76d-903d3fa3800b","Type":"ContainerStarted","Data":"2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9"}
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.198515 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"df4d9101-39da-4876-9509-ecf7563515ee","Type":"ContainerStarted","Data":"72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b"}
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.198561 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"df4d9101-39da-4876-9509-ecf7563515ee","Type":"ContainerStarted","Data":"8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6"}
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.198593 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="df4d9101-39da-4876-9509-ecf7563515ee" containerName="nova-metadata-log" containerID="cri-o://8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6" gracePeriod=30
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.198646 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="df4d9101-39da-4876-9509-ecf7563515ee" containerName="nova-metadata-metadata" containerID="cri-o://72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b" gracePeriod=30
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.226859 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.607290107 podStartE2EDuration="5.226839151s" podCreationTimestamp="2025-10-06 13:58:39 +0000 UTC" firstStartedPulling="2025-10-06 13:58:40.503884741 +0000 UTC m=+1209.001203268" lastFinishedPulling="2025-10-06 13:58:43.123433775 +0000 UTC m=+1211.620752312" observedRunningTime="2025-10-06 13:58:44.21836526 +0000 UTC m=+1212.715683817" watchObservedRunningTime="2025-10-06 13:58:44.226839151 +0000 UTC m=+1212.724157698"
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.243267 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.536747051 podStartE2EDuration="5.243247286s" podCreationTimestamp="2025-10-06 13:58:39 +0000 UTC" firstStartedPulling="2025-10-06 13:58:40.414517223 +0000 UTC m=+1208.911835760" lastFinishedPulling="2025-10-06 13:58:43.121017458 +0000 UTC m=+1211.618335995" observedRunningTime="2025-10-06 13:58:44.235424786 +0000 UTC m=+1212.732743333" watchObservedRunningTime="2025-10-06 13:58:44.243247286 +0000 UTC m=+1212.740565843"
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.261768 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.277179797 podStartE2EDuration="5.261751178s" podCreationTimestamp="2025-10-06 13:58:39 +0000 UTC" firstStartedPulling="2025-10-06 13:58:40.137059957 +0000 UTC m=+1208.634378494" lastFinishedPulling="2025-10-06 13:58:43.121631338 +0000 UTC m=+1211.618949875" observedRunningTime="2025-10-06 13:58:44.258864415 +0000 UTC m=+1212.756182972" watchObservedRunningTime="2025-10-06 13:58:44.261751178 +0000 UTC m=+1212.759069715"
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.278772 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.307874449 podStartE2EDuration="5.278754762s" podCreationTimestamp="2025-10-06 13:58:39 +0000 UTC" firstStartedPulling="2025-10-06 13:58:40.150139555 +0000 UTC m=+1208.647458092" lastFinishedPulling="2025-10-06 13:58:43.121019868 +0000 UTC m=+1211.618338405" observedRunningTime="2025-10-06 13:58:44.275688273 +0000 UTC m=+1212.773006810" watchObservedRunningTime="2025-10-06 13:58:44.278754762 +0000 UTC m=+1212.776073289"
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.629357 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.629771 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.790430 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.813976 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.827874 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.947860 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-config-data\") pod \"df4d9101-39da-4876-9509-ecf7563515ee\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") "
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.947920 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4d9101-39da-4876-9509-ecf7563515ee-logs\") pod \"df4d9101-39da-4876-9509-ecf7563515ee\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") "
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.948217 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdmm4\" (UniqueName: \"kubernetes.io/projected/df4d9101-39da-4876-9509-ecf7563515ee-kube-api-access-jdmm4\") pod \"df4d9101-39da-4876-9509-ecf7563515ee\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") "
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.948266 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-combined-ca-bundle\") pod \"df4d9101-39da-4876-9509-ecf7563515ee\" (UID: \"df4d9101-39da-4876-9509-ecf7563515ee\") "
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.948312 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df4d9101-39da-4876-9509-ecf7563515ee-logs" (OuterVolumeSpecName: "logs") pod "df4d9101-39da-4876-9509-ecf7563515ee" (UID: "df4d9101-39da-4876-9509-ecf7563515ee"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.948659 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4d9101-39da-4876-9509-ecf7563515ee-logs\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.954238 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df4d9101-39da-4876-9509-ecf7563515ee-kube-api-access-jdmm4" (OuterVolumeSpecName: "kube-api-access-jdmm4") pod "df4d9101-39da-4876-9509-ecf7563515ee" (UID: "df4d9101-39da-4876-9509-ecf7563515ee"). InnerVolumeSpecName "kube-api-access-jdmm4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.976733 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-config-data" (OuterVolumeSpecName: "config-data") pod "df4d9101-39da-4876-9509-ecf7563515ee" (UID: "df4d9101-39da-4876-9509-ecf7563515ee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:44 crc kubenswrapper[4757]: I1006 13:58:44.978528 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df4d9101-39da-4876-9509-ecf7563515ee" (UID: "df4d9101-39da-4876-9509-ecf7563515ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.050712 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdmm4\" (UniqueName: \"kubernetes.io/projected/df4d9101-39da-4876-9509-ecf7563515ee-kube-api-access-jdmm4\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.051056 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.051069 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4d9101-39da-4876-9509-ecf7563515ee-config-data\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.211042 4757 generic.go:334] "Generic (PLEG): container finished" podID="df4d9101-39da-4876-9509-ecf7563515ee" containerID="72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b" exitCode=0
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.211116 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.211144 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"df4d9101-39da-4876-9509-ecf7563515ee","Type":"ContainerDied","Data":"72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b"}
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.211187 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"df4d9101-39da-4876-9509-ecf7563515ee","Type":"ContainerDied","Data":"8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6"}
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.211213 4757 scope.go:117] "RemoveContainer" containerID="72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.211119 4757 generic.go:334] "Generic (PLEG): container finished" podID="df4d9101-39da-4876-9509-ecf7563515ee" containerID="8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6" exitCode=143
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.212025 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"df4d9101-39da-4876-9509-ecf7563515ee","Type":"ContainerDied","Data":"a63cddbe2376c7fb4ff28d81bc55c6f69f1c88536f365515e5e7ac4ae4391092"}
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.254627 4757 scope.go:117] "RemoveContainer" containerID="8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.256741 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.275633 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.289336 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Oct 06 13:58:45 crc kubenswrapper[4757]: E1006 13:58:45.289811 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df4d9101-39da-4876-9509-ecf7563515ee" containerName="nova-metadata-metadata"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.289827 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="df4d9101-39da-4876-9509-ecf7563515ee" containerName="nova-metadata-metadata"
Oct 06 13:58:45 crc kubenswrapper[4757]: E1006 13:58:45.289843 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df4d9101-39da-4876-9509-ecf7563515ee" containerName="nova-metadata-log"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.289849 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="df4d9101-39da-4876-9509-ecf7563515ee" containerName="nova-metadata-log"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.290005 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="df4d9101-39da-4876-9509-ecf7563515ee" containerName="nova-metadata-metadata"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.290033 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="df4d9101-39da-4876-9509-ecf7563515ee" containerName="nova-metadata-log"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.291024 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.293217 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.293334 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.302863 4757 scope.go:117] "RemoveContainer" containerID="72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b"
Oct 06 13:58:45 crc kubenswrapper[4757]: E1006 13:58:45.307717 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b\": container with ID starting with 72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b not found: ID does not exist" containerID="72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.307759 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b"} err="failed to get container status \"72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b\": rpc error: code = NotFound desc = could not find container \"72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b\": container with ID starting with 72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b not found: ID does not exist"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.307783 4757 scope.go:117] "RemoveContainer" containerID="8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6"
Oct 06 13:58:45 crc kubenswrapper[4757]: E1006 13:58:45.308473 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6\": container with ID starting with 8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6 not found: ID does not exist" containerID="8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.308507 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6"} err="failed to get container status \"8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6\": rpc error: code = NotFound desc = could not find container \"8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6\": container with ID starting with 8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6 not found: ID does not exist"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.308529 4757 scope.go:117] "RemoveContainer" containerID="72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.310546 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b"} err="failed to get container status \"72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b\": rpc error: code = NotFound desc = could not find container \"72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b\": container with ID starting with 72330b89a37b50d8a567053f2ea6aab573c75d5b5325a80c47edb9a5bb9b818b not found: ID does not exist"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.310594 4757 scope.go:117] "RemoveContainer" containerID="8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.312997 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6"} err="failed to get container status \"8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6\": rpc error: code = NotFound desc = could not find container \"8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6\": container with ID starting with 8cf0f6555a287372b619d4510c67cd4accbbbb4158a5b743bf84e86fddb271e6 not found: ID does not exist"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.318715 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.466873 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.466982 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-logs\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.467167 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.467340 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96rtj\" (UniqueName: \"kubernetes.io/projected/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-kube-api-access-96rtj\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.467495 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-config-data\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.569374 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.569523 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-logs\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.569630 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.569718 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96rtj\" (UniqueName: \"kubernetes.io/projected/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-kube-api-access-96rtj\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.569768 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-config-data\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.570332 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-logs\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.583046 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-config-data\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.585234 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.585532 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96rtj\" (UniqueName: \"kubernetes.io/projected/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-kube-api-access-96rtj\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.586568 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " pod="openstack/nova-metadata-0"
Oct 06 13:58:45 crc kubenswrapper[4757]: I1006 13:58:45.623918 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Oct 06 13:58:46 crc kubenswrapper[4757]: I1006 13:58:46.112776 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Oct 06 13:58:46 crc kubenswrapper[4757]: I1006 13:58:46.201272 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df4d9101-39da-4876-9509-ecf7563515ee" path="/var/lib/kubelet/pods/df4d9101-39da-4876-9509-ecf7563515ee/volumes"
Oct 06 13:58:46 crc kubenswrapper[4757]: I1006 13:58:46.222280 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c","Type":"ContainerStarted","Data":"ad36a74cf7cca1ad2a2a573cede5dea8d982b57d2c320a9cfa11eee54c9b1c42"}
Oct 06 13:58:47 crc kubenswrapper[4757]: I1006 13:58:47.237860 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c","Type":"ContainerStarted","Data":"e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e"}
Oct 06 13:58:47 crc kubenswrapper[4757]: I1006 13:58:47.238313 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c","Type":"ContainerStarted","Data":"2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2"}
Oct 06 13:58:47 crc kubenswrapper[4757]: I1006 13:58:47.279221 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.27918879 podStartE2EDuration="2.27918879s" podCreationTimestamp="2025-10-06 13:58:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:47.263399105 +0000 UTC m=+1215.760717642" watchObservedRunningTime="2025-10-06 13:58:47.27918879 +0000 UTC m=+1215.776507367"
Oct 06 13:58:48 crc kubenswrapper[4757]: I1006 13:58:48.247996 4757 generic.go:334] "Generic (PLEG): container finished" podID="48547398-bd84-4b69-b6a8-b3db5e7e32a9" containerID="ead89b6eb7ef2cdb8afe050d91ea530c7f417212cefe4bec8bab853834ee9629" exitCode=0
Oct 06 13:58:48 crc kubenswrapper[4757]: I1006 13:58:48.248072 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nz4nw" event={"ID":"48547398-bd84-4b69-b6a8-b3db5e7e32a9","Type":"ContainerDied","Data":"ead89b6eb7ef2cdb8afe050d91ea530c7f417212cefe4bec8bab853834ee9629"}
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.260183 4757 generic.go:334] "Generic (PLEG): container finished" podID="ccc640a0-578c-4bdd-9c71-789d3c821099" containerID="85d511bf0638c5d866a7350e35a676d505d59153aaeb1e1c9cb119aa200d5d73" exitCode=0
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.260247 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-95qtm" event={"ID":"ccc640a0-578c-4bdd-9c71-789d3c821099","Type":"ContainerDied","Data":"85d511bf0638c5d866a7350e35a676d505d59153aaeb1e1c9cb119aa200d5d73"}
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.612223 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.612268 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.699552 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.790184 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.799218 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86bc686f95-5b58c"
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.843775 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.872275 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bc848c5c-dclqc"]
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.872530 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" podUID="2ede69c8-f24c-4bf1-82b6-d28d349fb686" containerName="dnsmasq-dns" containerID="cri-o://b98bcddeb47dcac8014d7c6dfc9cc3eca9ab1c8761ddfa47739a393cb7f1ea2b" gracePeriod=10
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.883869 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-combined-ca-bundle\") pod \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") "
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.883987 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-scripts\") pod \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") "
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.884024 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58qvp\" (UniqueName: \"kubernetes.io/projected/48547398-bd84-4b69-b6a8-b3db5e7e32a9-kube-api-access-58qvp\") pod \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") "
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.884081 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-config-data\") pod \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\" (UID: \"48547398-bd84-4b69-b6a8-b3db5e7e32a9\") "
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.902393 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48547398-bd84-4b69-b6a8-b3db5e7e32a9-kube-api-access-58qvp" (OuterVolumeSpecName: "kube-api-access-58qvp") pod "48547398-bd84-4b69-b6a8-b3db5e7e32a9" (UID: "48547398-bd84-4b69-b6a8-b3db5e7e32a9"). InnerVolumeSpecName "kube-api-access-58qvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.902504 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-scripts" (OuterVolumeSpecName: "scripts") pod "48547398-bd84-4b69-b6a8-b3db5e7e32a9" (UID: "48547398-bd84-4b69-b6a8-b3db5e7e32a9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.926919 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-config-data" (OuterVolumeSpecName: "config-data") pod "48547398-bd84-4b69-b6a8-b3db5e7e32a9" (UID: "48547398-bd84-4b69-b6a8-b3db5e7e32a9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.964967 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48547398-bd84-4b69-b6a8-b3db5e7e32a9" (UID: "48547398-bd84-4b69-b6a8-b3db5e7e32a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.986361 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-config-data\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.986405 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.986417 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48547398-bd84-4b69-b6a8-b3db5e7e32a9-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:49 crc kubenswrapper[4757]: I1006 13:58:49.986431 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58qvp\" (UniqueName: \"kubernetes.io/projected/48547398-bd84-4b69-b6a8-b3db5e7e32a9-kube-api-access-58qvp\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.272798 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nz4nw"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.273029 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nz4nw" event={"ID":"48547398-bd84-4b69-b6a8-b3db5e7e32a9","Type":"ContainerDied","Data":"dc726c7392b3cc69f9193911609ac418b0c8f39a93ac2b51f70075811d46c050"}
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.273084 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc726c7392b3cc69f9193911609ac418b0c8f39a93ac2b51f70075811d46c050"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.275364 4757 generic.go:334] "Generic (PLEG): container finished" podID="2ede69c8-f24c-4bf1-82b6-d28d349fb686" containerID="b98bcddeb47dcac8014d7c6dfc9cc3eca9ab1c8761ddfa47739a393cb7f1ea2b" exitCode=0
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.275445 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" event={"ID":"2ede69c8-f24c-4bf1-82b6-d28d349fb686","Type":"ContainerDied","Data":"b98bcddeb47dcac8014d7c6dfc9cc3eca9ab1c8761ddfa47739a393cb7f1ea2b"}
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.330895 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.340557 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.420148 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 06 13:58:50 crc kubenswrapper[4757]: E1006 13:58:50.420900 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ede69c8-f24c-4bf1-82b6-d28d349fb686" containerName="init"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.420920 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ede69c8-f24c-4bf1-82b6-d28d349fb686" containerName="init"
Oct 06 13:58:50 crc kubenswrapper[4757]: E1006 13:58:50.420943 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ede69c8-f24c-4bf1-82b6-d28d349fb686" containerName="dnsmasq-dns"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.420951 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ede69c8-f24c-4bf1-82b6-d28d349fb686" containerName="dnsmasq-dns"
Oct 06 13:58:50 crc kubenswrapper[4757]: E1006 13:58:50.420987 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48547398-bd84-4b69-b6a8-b3db5e7e32a9" containerName="nova-cell1-conductor-db-sync"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.420995 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="48547398-bd84-4b69-b6a8-b3db5e7e32a9" containerName="nova-cell1-conductor-db-sync"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.421325 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="48547398-bd84-4b69-b6a8-b3db5e7e32a9" containerName="nova-cell1-conductor-db-sync"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.421368 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ede69c8-f24c-4bf1-82b6-d28d349fb686" containerName="dnsmasq-dns"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.422196 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.428028 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.439731 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.512010 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-sb\") pod \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.512874 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-nb\") pod \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.512963 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-svc\") pod \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.513229 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-swift-storage-0\") pod \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.513371 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rqww\" (UniqueName: \"kubernetes.io/projected/2ede69c8-f24c-4bf1-82b6-d28d349fb686-kube-api-access-4rqww\") pod \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.513469 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-config\") pod \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\" (UID: \"2ede69c8-f24c-4bf1-82b6-d28d349fb686\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.524799 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ede69c8-f24c-4bf1-82b6-d28d349fb686-kube-api-access-4rqww" (OuterVolumeSpecName: "kube-api-access-4rqww") pod "2ede69c8-f24c-4bf1-82b6-d28d349fb686" (UID: "2ede69c8-f24c-4bf1-82b6-d28d349fb686"). InnerVolumeSpecName "kube-api-access-4rqww". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.568923 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2ede69c8-f24c-4bf1-82b6-d28d349fb686" (UID: "2ede69c8-f24c-4bf1-82b6-d28d349fb686"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.578309 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2ede69c8-f24c-4bf1-82b6-d28d349fb686" (UID: "2ede69c8-f24c-4bf1-82b6-d28d349fb686"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.580361 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-config" (OuterVolumeSpecName: "config") pod "2ede69c8-f24c-4bf1-82b6-d28d349fb686" (UID: "2ede69c8-f24c-4bf1-82b6-d28d349fb686"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.589811 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2ede69c8-f24c-4bf1-82b6-d28d349fb686" (UID: "2ede69c8-f24c-4bf1-82b6-d28d349fb686"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.600960 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2ede69c8-f24c-4bf1-82b6-d28d349fb686" (UID: "2ede69c8-f24c-4bf1-82b6-d28d349fb686"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.622060 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.622326 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.622398 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdthx\" (UniqueName: \"kubernetes.io/projected/71099fbe-349d-4a04-857c-41f270ec89af-kube-api-access-xdthx\") pod \"nova-cell1-conductor-0\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.622547 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.622570 4757 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.622584 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rqww\" (UniqueName: \"kubernetes.io/projected/2ede69c8-f24c-4bf1-82b6-d28d349fb686-kube-api-access-4rqww\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.622597 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-config\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.622636 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.622657 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2ede69c8-f24c-4bf1-82b6-d28d349fb686-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.624190 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.624242 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.694541 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.694606 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.723569 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdthx\" (UniqueName: \"kubernetes.io/projected/71099fbe-349d-4a04-857c-41f270ec89af-kube-api-access-xdthx\") pod \"nova-cell1-conductor-0\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.723736 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.723997 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.727225 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.730883 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-95qtm"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.732697 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.742204 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdthx\" (UniqueName: \"kubernetes.io/projected/71099fbe-349d-4a04-857c-41f270ec89af-kube-api-access-xdthx\") pod \"nova-cell1-conductor-0\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.748053 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.825290 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-combined-ca-bundle\") pod \"ccc640a0-578c-4bdd-9c71-789d3c821099\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.825422 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-config-data\") pod \"ccc640a0-578c-4bdd-9c71-789d3c821099\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.825534 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-scripts\") pod \"ccc640a0-578c-4bdd-9c71-789d3c821099\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.825578 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk4wr\" (UniqueName: \"kubernetes.io/projected/ccc640a0-578c-4bdd-9c71-789d3c821099-kube-api-access-lk4wr\") pod \"ccc640a0-578c-4bdd-9c71-789d3c821099\" (UID: \"ccc640a0-578c-4bdd-9c71-789d3c821099\") "
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.831061 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-scripts" (OuterVolumeSpecName: "scripts") pod "ccc640a0-578c-4bdd-9c71-789d3c821099" (UID: "ccc640a0-578c-4bdd-9c71-789d3c821099"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.834520 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccc640a0-578c-4bdd-9c71-789d3c821099-kube-api-access-lk4wr" (OuterVolumeSpecName: "kube-api-access-lk4wr") pod "ccc640a0-578c-4bdd-9c71-789d3c821099" (UID: "ccc640a0-578c-4bdd-9c71-789d3c821099"). InnerVolumeSpecName "kube-api-access-lk4wr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.859372 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-config-data" (OuterVolumeSpecName: "config-data") pod "ccc640a0-578c-4bdd-9c71-789d3c821099" (UID: "ccc640a0-578c-4bdd-9c71-789d3c821099"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.860211 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ccc640a0-578c-4bdd-9c71-789d3c821099" (UID: "ccc640a0-578c-4bdd-9c71-789d3c821099"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.926698 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.927055 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk4wr\" (UniqueName: \"kubernetes.io/projected/ccc640a0-578c-4bdd-9c71-789d3c821099-kube-api-access-lk4wr\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.927067 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:50 crc kubenswrapper[4757]: I1006 13:58:50.927078 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ccc640a0-578c-4bdd-9c71-789d3c821099-config-data\") on node \"crc\" DevicePath \"\""
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.219437 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 06 13:58:51 crc kubenswrapper[4757]: W1006 13:58:51.220247 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71099fbe_349d_4a04_857c_41f270ec89af.slice/crio-e9d33f287fea78d68b9a304f23b081e764bc6d18869fc2bedcbd94d8b4d6083a WatchSource:0}: Error finding container e9d33f287fea78d68b9a304f23b081e764bc6d18869fc2bedcbd94d8b4d6083a: Status 404 returned error can't find the container with id e9d33f287fea78d68b9a304f23b081e764bc6d18869fc2bedcbd94d8b4d6083a
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.285850 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-95qtm"
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.286015 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-95qtm" event={"ID":"ccc640a0-578c-4bdd-9c71-789d3c821099","Type":"ContainerDied","Data":"71339f7cc73c01642c680255844c4d51205d4af94fc65957f1ba7ad57f6d9753"}
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.286038 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71339f7cc73c01642c680255844c4d51205d4af94fc65957f1ba7ad57f6d9753"
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.287782 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc" event={"ID":"2ede69c8-f24c-4bf1-82b6-d28d349fb686","Type":"ContainerDied","Data":"1cdd35eb62cc858b6e0b2b4177ecad7b12e848b20099d8e27623696710521149"}
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.287806 4757 scope.go:117] "RemoveContainer" containerID="b98bcddeb47dcac8014d7c6dfc9cc3eca9ab1c8761ddfa47739a393cb7f1ea2b"
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.287891 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67bc848c5c-dclqc"
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.302338 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"71099fbe-349d-4a04-857c-41f270ec89af","Type":"ContainerStarted","Data":"e9d33f287fea78d68b9a304f23b081e764bc6d18869fc2bedcbd94d8b4d6083a"}
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.329038 4757 scope.go:117] "RemoveContainer" containerID="578b93cc240dbacbd80fe044625bbae34469aef7e7fb41649ffd6cb020e3d8ef"
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.366127 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bc848c5c-dclqc"]
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.381931 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67bc848c5c-dclqc"]
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.393145 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.393407 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-log" containerID="cri-o://2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9" gracePeriod=30
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.393609 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-api" containerID="cri-o://57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2" gracePeriod=30
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.421333 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.436972 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.437182 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerName="nova-metadata-log"
containerID="cri-o://2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2" gracePeriod=30 Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.437600 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerName="nova-metadata-metadata" containerID="cri-o://e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e" gracePeriod=30 Oct 06 13:58:51 crc kubenswrapper[4757]: E1006 13:58:51.613322 4757 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ede69c8_f24c_4bf1_82b6_d28d349fb686.slice/crio-1cdd35eb62cc858b6e0b2b4177ecad7b12e848b20099d8e27623696710521149\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc57d075_eaca_4298_a76d_903d3fa3800b.slice/crio-conmon-2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ede69c8_f24c_4bf1_82b6_d28d349fb686.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea1b03a8_41fc_4c8f_89a4_42ee48f68b4c.slice/crio-2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc57d075_eaca_4298_a76d_903d3fa3800b.slice/crio-2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea1b03a8_41fc_4c8f_89a4_42ee48f68b4c.slice/crio-conmon-2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2.scope\": RecentStats: unable to find data in memory cache]" Oct 06 13:58:51 crc kubenswrapper[4757]: I1006 13:58:51.987378 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.053606 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-config-data\") pod \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.054130 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96rtj\" (UniqueName: \"kubernetes.io/projected/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-kube-api-access-96rtj\") pod \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.054244 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-nova-metadata-tls-certs\") pod \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.054395 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-logs\") pod \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.054497 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-combined-ca-bundle\") pod \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\" (UID: \"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c\") " Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.054692 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-logs" (OuterVolumeSpecName: "logs") pod "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" (UID: "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.055201 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.059867 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-kube-api-access-96rtj" (OuterVolumeSpecName: "kube-api-access-96rtj") pod "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" (UID: "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c"). InnerVolumeSpecName "kube-api-access-96rtj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.081498 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-config-data" (OuterVolumeSpecName: "config-data") pod "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" (UID: "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.081551 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" (UID: "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.111005 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" (UID: "ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.157157 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96rtj\" (UniqueName: \"kubernetes.io/projected/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-kube-api-access-96rtj\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.157186 4757 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.157195 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.157204 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.191804 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ede69c8-f24c-4bf1-82b6-d28d349fb686" path="/var/lib/kubelet/pods/2ede69c8-f24c-4bf1-82b6-d28d349fb686/volumes" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.312599 4757 generic.go:334] "Generic (PLEG): container finished" podID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerID="e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e" exitCode=0 Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.312629 4757 generic.go:334] "Generic (PLEG): container finished" podID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerID="2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2" exitCode=143 Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.312660 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c","Type":"ContainerDied","Data":"e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e"} Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.312682 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c","Type":"ContainerDied","Data":"2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2"} Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.312692 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-metadata-0" event={"ID":"ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c","Type":"ContainerDied","Data":"ad36a74cf7cca1ad2a2a573cede5dea8d982b57d2c320a9cfa11eee54c9b1c42"} Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.312706 4757 scope.go:117] "RemoveContainer" containerID="e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.312796 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.317610 4757 generic.go:334] "Generic (PLEG): container finished" podID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerID="2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9" exitCode=143 Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.317662 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc57d075-eaca-4298-a76d-903d3fa3800b","Type":"ContainerDied","Data":"2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9"} Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.339052 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0629d26d-a456-477c-9adf-54885b0ff36f" containerName="nova-scheduler-scheduler" containerID="cri-o://944183a571c7c2fc6c984f92c3b0afc82536909cb0fa41c943c8062170e065bb" gracePeriod=30 Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.339074 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"71099fbe-349d-4a04-857c-41f270ec89af","Type":"ContainerStarted","Data":"ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4"} Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.339395 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.339933 4757 scope.go:117] "RemoveContainer" containerID="2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.373199 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.386839 4757 scope.go:117] "RemoveContainer" containerID="e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e" Oct 06 13:58:52 crc kubenswrapper[4757]: E1006 13:58:52.389390 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e\": container with ID starting with e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e not found: ID does not exist" containerID="e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.389443 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e"} err="failed to get container status \"e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e\": rpc error: code = NotFound desc = could not find container \"e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e\": container with ID starting with e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e not found: ID does not exist" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 
13:58:52.389474 4757 scope.go:117] "RemoveContainer" containerID="2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2" Oct 06 13:58:52 crc kubenswrapper[4757]: E1006 13:58:52.399390 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2\": container with ID starting with 2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2 not found: ID does not exist" containerID="2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.399453 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2"} err="failed to get container status \"2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2\": rpc error: code = NotFound desc = could not find container \"2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2\": container with ID starting with 2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2 not found: ID does not exist" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.399482 4757 scope.go:117] "RemoveContainer" containerID="e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.402160 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.404576 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e"} err="failed to get container status \"e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e\": rpc error: code = NotFound desc = could not find container \"e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e\": container with ID starting with e1b189f0231a5a079e7ff362b9813db280422d514bb10a9167c12451f5bd3d0e not found: ID does not exist" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.404626 4757 scope.go:117] "RemoveContainer" containerID="2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.404982 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2"} err="failed to get container status \"2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2\": rpc error: code = NotFound desc = could not find container \"2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2\": container with ID starting with 2fc4e28bb86698c3138c14c68941417defbacc09567daaf6ea0ee191ce2dd5a2 not found: ID does not exist" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.424252 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:58:52 crc kubenswrapper[4757]: E1006 13:58:52.424768 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccc640a0-578c-4bdd-9c71-789d3c821099" containerName="nova-manage" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.424785 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccc640a0-578c-4bdd-9c71-789d3c821099" containerName="nova-manage" Oct 06 13:58:52 crc kubenswrapper[4757]: E1006 13:58:52.424817 4757 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerName="nova-metadata-metadata" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.424827 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerName="nova-metadata-metadata" Oct 06 13:58:52 crc kubenswrapper[4757]: E1006 13:58:52.424840 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerName="nova-metadata-log" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.424890 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerName="nova-metadata-log" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.425082 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerName="nova-metadata-metadata" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.425150 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccc640a0-578c-4bdd-9c71-789d3c821099" containerName="nova-manage" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.425169 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" containerName="nova-metadata-log" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.426348 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.432491 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.432608 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.434558 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.439410 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.439392425 podStartE2EDuration="2.439392425s" podCreationTimestamp="2025-10-06 13:58:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:52.386654848 +0000 UTC m=+1220.883973385" watchObservedRunningTime="2025-10-06 13:58:52.439392425 +0000 UTC m=+1220.936710962" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.461267 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.461384 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twbl2\" (UniqueName: \"kubernetes.io/projected/3dac5395-f6e7-4731-af9c-813f1863e380-kube-api-access-twbl2\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.461450 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-config-data\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.461469 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dac5395-f6e7-4731-af9c-813f1863e380-logs\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.461627 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.562889 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-config-data\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.562956 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dac5395-f6e7-4731-af9c-813f1863e380-logs\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.563443 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dac5395-f6e7-4731-af9c-813f1863e380-logs\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.563512 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.564415 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.564518 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twbl2\" (UniqueName: \"kubernetes.io/projected/3dac5395-f6e7-4731-af9c-813f1863e380-kube-api-access-twbl2\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.566757 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-config-data\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.568660 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.570521 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.582824 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twbl2\" (UniqueName: \"kubernetes.io/projected/3dac5395-f6e7-4731-af9c-813f1863e380-kube-api-access-twbl2\") pod \"nova-metadata-0\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " pod="openstack/nova-metadata-0" Oct 06 13:58:52 crc kubenswrapper[4757]: I1006 13:58:52.748459 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:58:53 crc kubenswrapper[4757]: I1006 13:58:53.228399 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:58:53 crc kubenswrapper[4757]: I1006 13:58:53.356552 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3dac5395-f6e7-4731-af9c-813f1863e380","Type":"ContainerStarted","Data":"51500f68601534894ca385810250f3e13fc5c0f39a888a88787dbd5aa8f09673"} Oct 06 13:58:54 crc kubenswrapper[4757]: I1006 13:58:54.207198 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c" path="/var/lib/kubelet/pods/ea1b03a8-41fc-4c8f-89a4-42ee48f68b4c/volumes" Oct 06 13:58:54 crc kubenswrapper[4757]: I1006 13:58:54.375433 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3dac5395-f6e7-4731-af9c-813f1863e380","Type":"ContainerStarted","Data":"801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a"} Oct 06 13:58:54 crc kubenswrapper[4757]: I1006 13:58:54.375506 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3dac5395-f6e7-4731-af9c-813f1863e380","Type":"ContainerStarted","Data":"8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27"} Oct 06 13:58:54 crc kubenswrapper[4757]: I1006 13:58:54.418614 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.418488552 podStartE2EDuration="2.418488552s" podCreationTimestamp="2025-10-06 13:58:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:54.402878603 +0000 UTC m=+1222.900197180" watchObservedRunningTime="2025-10-06 13:58:54.418488552 +0000 UTC m=+1222.915807099" Oct 06 13:58:54 crc kubenswrapper[4757]: E1006 13:58:54.793567 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="944183a571c7c2fc6c984f92c3b0afc82536909cb0fa41c943c8062170e065bb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 06 13:58:54 crc kubenswrapper[4757]: E1006 13:58:54.795127 4757 log.go:32] "ExecSync cmd from runtime service failed" 
err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="944183a571c7c2fc6c984f92c3b0afc82536909cb0fa41c943c8062170e065bb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 06 13:58:54 crc kubenswrapper[4757]: E1006 13:58:54.796203 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="944183a571c7c2fc6c984f92c3b0afc82536909cb0fa41c943c8062170e065bb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 06 13:58:54 crc kubenswrapper[4757]: E1006 13:58:54.796249 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="0629d26d-a456-477c-9adf-54885b0ff36f" containerName="nova-scheduler-scheduler" Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.315037 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.386680 4757 generic.go:334] "Generic (PLEG): container finished" podID="0629d26d-a456-477c-9adf-54885b0ff36f" containerID="944183a571c7c2fc6c984f92c3b0afc82536909cb0fa41c943c8062170e065bb" exitCode=0 Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.387636 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0629d26d-a456-477c-9adf-54885b0ff36f","Type":"ContainerDied","Data":"944183a571c7c2fc6c984f92c3b0afc82536909cb0fa41c943c8062170e065bb"} Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.498812 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.529671 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-config-data\") pod \"0629d26d-a456-477c-9adf-54885b0ff36f\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.529787 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tv489\" (UniqueName: \"kubernetes.io/projected/0629d26d-a456-477c-9adf-54885b0ff36f-kube-api-access-tv489\") pod \"0629d26d-a456-477c-9adf-54885b0ff36f\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.529833 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-combined-ca-bundle\") pod \"0629d26d-a456-477c-9adf-54885b0ff36f\" (UID: \"0629d26d-a456-477c-9adf-54885b0ff36f\") " Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.537356 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0629d26d-a456-477c-9adf-54885b0ff36f-kube-api-access-tv489" (OuterVolumeSpecName: "kube-api-access-tv489") pod "0629d26d-a456-477c-9adf-54885b0ff36f" (UID: "0629d26d-a456-477c-9adf-54885b0ff36f"). InnerVolumeSpecName "kube-api-access-tv489". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.565232 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-config-data" (OuterVolumeSpecName: "config-data") pod "0629d26d-a456-477c-9adf-54885b0ff36f" (UID: "0629d26d-a456-477c-9adf-54885b0ff36f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.569858 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0629d26d-a456-477c-9adf-54885b0ff36f" (UID: "0629d26d-a456-477c-9adf-54885b0ff36f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.632519 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.632554 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tv489\" (UniqueName: \"kubernetes.io/projected/0629d26d-a456-477c-9adf-54885b0ff36f-kube-api-access-tv489\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:55 crc kubenswrapper[4757]: I1006 13:58:55.632566 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0629d26d-a456-477c-9adf-54885b0ff36f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.327613 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.398946 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0629d26d-a456-477c-9adf-54885b0ff36f","Type":"ContainerDied","Data":"7d8d29650dacf7f04e90e80aeeecb75e87c46a38adfbd1c6a8519ba3210d06ac"} Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.399010 4757 scope.go:117] "RemoveContainer" containerID="944183a571c7c2fc6c984f92c3b0afc82536909cb0fa41c943c8062170e065bb" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.399191 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.404762 4757 generic.go:334] "Generic (PLEG): container finished" podID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerID="57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2" exitCode=0 Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.404807 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc57d075-eaca-4298-a76d-903d3fa3800b","Type":"ContainerDied","Data":"57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2"} Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.404840 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cc57d075-eaca-4298-a76d-903d3fa3800b","Type":"ContainerDied","Data":"464ecfbf7913be16b6b3c8c76b0f1f77ecae0a404a825bb5c12d096e1b0d4018"} Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.404892 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.421692 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.435720 4757 scope.go:117] "RemoveContainer" containerID="57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.438561 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.445834 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-combined-ca-bundle\") pod \"cc57d075-eaca-4298-a76d-903d3fa3800b\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.445983 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfnvr\" (UniqueName: \"kubernetes.io/projected/cc57d075-eaca-4298-a76d-903d3fa3800b-kube-api-access-dfnvr\") pod \"cc57d075-eaca-4298-a76d-903d3fa3800b\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.446034 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc57d075-eaca-4298-a76d-903d3fa3800b-logs\") pod \"cc57d075-eaca-4298-a76d-903d3fa3800b\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.446164 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-config-data\") pod \"cc57d075-eaca-4298-a76d-903d3fa3800b\" (UID: \"cc57d075-eaca-4298-a76d-903d3fa3800b\") " Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.446514 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc57d075-eaca-4298-a76d-903d3fa3800b-logs" (OuterVolumeSpecName: "logs") pod "cc57d075-eaca-4298-a76d-903d3fa3800b" (UID: "cc57d075-eaca-4298-a76d-903d3fa3800b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.447203 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc57d075-eaca-4298-a76d-903d3fa3800b-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.453037 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.453123 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc57d075-eaca-4298-a76d-903d3fa3800b-kube-api-access-dfnvr" (OuterVolumeSpecName: "kube-api-access-dfnvr") pod "cc57d075-eaca-4298-a76d-903d3fa3800b" (UID: "cc57d075-eaca-4298-a76d-903d3fa3800b"). InnerVolumeSpecName "kube-api-access-dfnvr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:58:56 crc kubenswrapper[4757]: E1006 13:58:56.453482 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-api" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.453500 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-api" Oct 06 13:58:56 crc kubenswrapper[4757]: E1006 13:58:56.453520 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-log" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.453527 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-log" Oct 06 13:58:56 crc kubenswrapper[4757]: E1006 13:58:56.453544 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0629d26d-a456-477c-9adf-54885b0ff36f" containerName="nova-scheduler-scheduler" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.453550 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0629d26d-a456-477c-9adf-54885b0ff36f" containerName="nova-scheduler-scheduler" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.453733 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-api" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.453757 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" containerName="nova-api-log" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.453767 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="0629d26d-a456-477c-9adf-54885b0ff36f" containerName="nova-scheduler-scheduler" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.454400 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.458354 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.462563 4757 scope.go:117] "RemoveContainer" containerID="2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.463358 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.483419 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-config-data" (OuterVolumeSpecName: "config-data") pod "cc57d075-eaca-4298-a76d-903d3fa3800b" (UID: "cc57d075-eaca-4298-a76d-903d3fa3800b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.487424 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc57d075-eaca-4298-a76d-903d3fa3800b" (UID: "cc57d075-eaca-4298-a76d-903d3fa3800b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.549449 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-config-data\") pod \"nova-scheduler-0\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.549751 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.550082 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmbrm\" (UniqueName: \"kubernetes.io/projected/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-kube-api-access-bmbrm\") pod \"nova-scheduler-0\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.550285 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.550380 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc57d075-eaca-4298-a76d-903d3fa3800b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.550459 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfnvr\" (UniqueName: \"kubernetes.io/projected/cc57d075-eaca-4298-a76d-903d3fa3800b-kube-api-access-dfnvr\") on node \"crc\" DevicePath \"\"" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.563526 4757 scope.go:117] "RemoveContainer" containerID="57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2" Oct 06 13:58:56 crc kubenswrapper[4757]: E1006 13:58:56.565177 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2\": container with ID starting with 57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2 not found: ID does not exist" containerID="57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.565226 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2"} err="failed to get container status \"57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2\": rpc error: code = NotFound desc = could not find container \"57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2\": container with ID starting with 57a99ae497bd37615f9b50961f0a594e23bcfd00f8c302548eee047e7257fee2 not found: ID does not exist" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.565274 4757 scope.go:117] "RemoveContainer" containerID="2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9" Oct 06 13:58:56 crc kubenswrapper[4757]: E1006 13:58:56.565598 4757 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9\": container with ID starting with 2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9 not found: ID does not exist" containerID="2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.565632 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9"} err="failed to get container status \"2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9\": rpc error: code = NotFound desc = could not find container \"2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9\": container with ID starting with 2e60d3baa4da8dd6f518007d4f04dc3a8301c5bb3f84ed83aaf428eddf0f5fa9 not found: ID does not exist" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.651957 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmbrm\" (UniqueName: \"kubernetes.io/projected/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-kube-api-access-bmbrm\") pod \"nova-scheduler-0\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.652366 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-config-data\") pod \"nova-scheduler-0\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.652524 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.665022 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-config-data\") pod \"nova-scheduler-0\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.665051 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.672169 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmbrm\" (UniqueName: \"kubernetes.io/projected/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-kube-api-access-bmbrm\") pod \"nova-scheduler-0\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.737168 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.753563 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.764219 4757 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-api-0"] Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.766395 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.774602 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.774868 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.855830 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.856272 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2eb0fb1-47fb-410c-b154-6171760d857c-logs\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.856348 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-config-data\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.856503 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pncr6\" (UniqueName: \"kubernetes.io/projected/a2eb0fb1-47fb-410c-b154-6171760d857c-kube-api-access-pncr6\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.879846 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.957190 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-config-data\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.957290 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pncr6\" (UniqueName: \"kubernetes.io/projected/a2eb0fb1-47fb-410c-b154-6171760d857c-kube-api-access-pncr6\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.957365 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.957397 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2eb0fb1-47fb-410c-b154-6171760d857c-logs\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.957837 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2eb0fb1-47fb-410c-b154-6171760d857c-logs\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.962577 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.962782 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-config-data\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:56 crc kubenswrapper[4757]: I1006 13:58:56.995086 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pncr6\" (UniqueName: \"kubernetes.io/projected/a2eb0fb1-47fb-410c-b154-6171760d857c-kube-api-access-pncr6\") pod \"nova-api-0\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " pod="openstack/nova-api-0" Oct 06 13:58:57 crc kubenswrapper[4757]: I1006 13:58:57.088362 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:58:57 crc kubenswrapper[4757]: I1006 13:58:57.145817 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:58:57 crc kubenswrapper[4757]: I1006 13:58:57.421233 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd","Type":"ContainerStarted","Data":"cc57a918f77c5a7708d90b3d81c02a594ecb2bbeab6536376c5a5cbb9a9fb91a"} Oct 06 13:58:57 crc kubenswrapper[4757]: I1006 13:58:57.421629 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd","Type":"ContainerStarted","Data":"b14ecb732ffb10a5e1338aa64f993ba95d6a265261506daac4e1d98eca5453e8"} Oct 06 13:58:57 crc kubenswrapper[4757]: I1006 13:58:57.446847 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.4468278940000001 podStartE2EDuration="1.446827894s" podCreationTimestamp="2025-10-06 13:58:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:57.442517985 +0000 UTC m=+1225.939836532" watchObservedRunningTime="2025-10-06 13:58:57.446827894 +0000 UTC m=+1225.944146431" Oct 06 13:58:57 crc kubenswrapper[4757]: W1006 13:58:57.564162 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2eb0fb1_47fb_410c_b154_6171760d857c.slice/crio-0bbdbef559fc5f7133f4e9e6dd1eb647165766c25f3ca0910dd38ad62521cc43 WatchSource:0}: Error finding container 0bbdbef559fc5f7133f4e9e6dd1eb647165766c25f3ca0910dd38ad62521cc43: Status 404 returned error can't find the container with id 0bbdbef559fc5f7133f4e9e6dd1eb647165766c25f3ca0910dd38ad62521cc43 Oct 06 13:58:57 crc kubenswrapper[4757]: I1006 13:58:57.565234 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:58:57 crc kubenswrapper[4757]: I1006 13:58:57.748775 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 06 13:58:57 crc kubenswrapper[4757]: I1006 13:58:57.749210 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 06 13:58:58 crc kubenswrapper[4757]: I1006 13:58:58.195585 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0629d26d-a456-477c-9adf-54885b0ff36f" path="/var/lib/kubelet/pods/0629d26d-a456-477c-9adf-54885b0ff36f/volumes" Oct 06 13:58:58 crc kubenswrapper[4757]: I1006 13:58:58.219436 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc57d075-eaca-4298-a76d-903d3fa3800b" path="/var/lib/kubelet/pods/cc57d075-eaca-4298-a76d-903d3fa3800b/volumes" Oct 06 13:58:58 crc kubenswrapper[4757]: I1006 13:58:58.433126 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2eb0fb1-47fb-410c-b154-6171760d857c","Type":"ContainerStarted","Data":"464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c"} Oct 06 13:58:58 crc kubenswrapper[4757]: I1006 13:58:58.433178 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2eb0fb1-47fb-410c-b154-6171760d857c","Type":"ContainerStarted","Data":"2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda"} Oct 06 13:58:58 crc kubenswrapper[4757]: I1006 13:58:58.433190 4757 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2eb0fb1-47fb-410c-b154-6171760d857c","Type":"ContainerStarted","Data":"0bbdbef559fc5f7133f4e9e6dd1eb647165766c25f3ca0910dd38ad62521cc43"} Oct 06 13:58:58 crc kubenswrapper[4757]: I1006 13:58:58.467631 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.467610996 podStartE2EDuration="2.467610996s" podCreationTimestamp="2025-10-06 13:58:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:58:58.459791186 +0000 UTC m=+1226.957109743" watchObservedRunningTime="2025-10-06 13:58:58.467610996 +0000 UTC m=+1226.964929523" Oct 06 13:58:59 crc kubenswrapper[4757]: I1006 13:58:59.537790 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 13:58:59 crc kubenswrapper[4757]: I1006 13:58:59.538416 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="7babda66-5edc-4775-b36d-e22b39689c1c" containerName="kube-state-metrics" containerID="cri-o://7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9" gracePeriod=30 Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.034697 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.113714 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t67mk\" (UniqueName: \"kubernetes.io/projected/7babda66-5edc-4775-b36d-e22b39689c1c-kube-api-access-t67mk\") pod \"7babda66-5edc-4775-b36d-e22b39689c1c\" (UID: \"7babda66-5edc-4775-b36d-e22b39689c1c\") " Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.120276 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7babda66-5edc-4775-b36d-e22b39689c1c-kube-api-access-t67mk" (OuterVolumeSpecName: "kube-api-access-t67mk") pod "7babda66-5edc-4775-b36d-e22b39689c1c" (UID: "7babda66-5edc-4775-b36d-e22b39689c1c"). InnerVolumeSpecName "kube-api-access-t67mk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.215470 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t67mk\" (UniqueName: \"kubernetes.io/projected/7babda66-5edc-4775-b36d-e22b39689c1c-kube-api-access-t67mk\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.452648 4757 generic.go:334] "Generic (PLEG): container finished" podID="7babda66-5edc-4775-b36d-e22b39689c1c" containerID="7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9" exitCode=2 Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.452706 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7babda66-5edc-4775-b36d-e22b39689c1c","Type":"ContainerDied","Data":"7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9"} Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.452746 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.452770 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7babda66-5edc-4775-b36d-e22b39689c1c","Type":"ContainerDied","Data":"24bd1cc377a26b91402fcdbb5b0be65de1eec0830d67411762890802bbdb509c"} Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.452792 4757 scope.go:117] "RemoveContainer" containerID="7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.479699 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.490347 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.498829 4757 scope.go:117] "RemoveContainer" containerID="7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9" Oct 06 13:59:00 crc kubenswrapper[4757]: E1006 13:59:00.499452 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9\": container with ID starting with 7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9 not found: ID does not exist" containerID="7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.499506 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9"} err="failed to get container status \"7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9\": rpc error: code = NotFound desc = could not find container \"7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9\": container with ID starting with 7a8008bac063a3f8ad0ed2c579aba104ad4776d0e1f4854a40cc26c2c229a5a9 not found: ID does not exist" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.503760 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 13:59:00 crc kubenswrapper[4757]: E1006 13:59:00.504250 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7babda66-5edc-4775-b36d-e22b39689c1c" containerName="kube-state-metrics" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.504271 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7babda66-5edc-4775-b36d-e22b39689c1c" containerName="kube-state-metrics" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.504507 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7babda66-5edc-4775-b36d-e22b39689c1c" containerName="kube-state-metrics" Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.505304 4757 util.go:30] "No sandbox for pod can be found. 
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.508141 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.508586 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.514890 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.621940 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.622351 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.622383 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lpcc\" (UniqueName: \"kubernetes.io/projected/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-api-access-8lpcc\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.622402 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.723877 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.723945 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lpcc\" (UniqueName: \"kubernetes.io/projected/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-api-access-8lpcc\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.723968 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.724083 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.731496 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.731527 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.731740 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.744229 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lpcc\" (UniqueName: \"kubernetes.io/projected/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-api-access-8lpcc\") pod \"kube-state-metrics-0\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " pod="openstack/kube-state-metrics-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.787438 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Oct 06 13:59:00 crc kubenswrapper[4757]: I1006 13:59:00.826043 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Oct 06 13:59:01 crc kubenswrapper[4757]: I1006 13:59:01.300619 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 06 13:59:01 crc kubenswrapper[4757]: I1006 13:59:01.462156 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c5b34617-b163-4b7a-9950-53f64a8cca2c","Type":"ContainerStarted","Data":"abc59daaf7bc965011509a5abe63acfe29e77cd751ce4c4da520fb488e2e5282"}
Oct 06 13:59:01 crc kubenswrapper[4757]: I1006 13:59:01.655206 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:59:01 crc kubenswrapper[4757]: I1006 13:59:01.655723 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="ceilometer-central-agent" containerID="cri-o://ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220" gracePeriod=30
Oct 06 13:59:01 crc kubenswrapper[4757]: I1006 13:59:01.656695 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="proxy-httpd" containerID="cri-o://ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c" gracePeriod=30
Oct 06 13:59:01 crc kubenswrapper[4757]: I1006 13:59:01.656788 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="sg-core" containerID="cri-o://b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa" gracePeriod=30
Oct 06 13:59:01 crc kubenswrapper[4757]: I1006 13:59:01.656848 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="ceilometer-notification-agent" containerID="cri-o://658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727" gracePeriod=30
Oct 06 13:59:01 crc kubenswrapper[4757]: I1006 13:59:01.879964 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 06 13:59:01 crc kubenswrapper[4757]: E1006 13:59:01.898834 4757 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab4e3bc3_86a8_46cf_a54a_c0180d8dfe47.slice/crio-conmon-b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab4e3bc3_86a8_46cf_a54a_c0180d8dfe47.slice/crio-b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa.scope\": RecentStats: unable to find data in memory cache]"
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.191963 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7babda66-5edc-4775-b36d-e22b39689c1c" path="/var/lib/kubelet/pods/7babda66-5edc-4775-b36d-e22b39689c1c/volumes"
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.476364 4757 generic.go:334] "Generic (PLEG): container finished" podID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerID="ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c" exitCode=0
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.476418 4757 generic.go:334] "Generic (PLEG): container finished" podID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerID="b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa" exitCode=2
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.476429 4757 generic.go:334] "Generic (PLEG): container finished" podID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerID="ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220" exitCode=0
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.476509 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerDied","Data":"ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c"}
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.476542 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerDied","Data":"b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa"}
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.476574 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerDied","Data":"ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220"}
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.478607 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c5b34617-b163-4b7a-9950-53f64a8cca2c","Type":"ContainerStarted","Data":"947f0a3b15f660d4a027bf43a3fba73c9916d632976aead40013fc2ec14335b9"}
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.478787 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.491366 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.050349622 podStartE2EDuration="2.49134969s" podCreationTimestamp="2025-10-06 13:59:00 +0000 UTC" firstStartedPulling="2025-10-06 13:59:01.297637884 +0000 UTC m=+1229.794956421" lastFinishedPulling="2025-10-06 13:59:01.738637952 +0000 UTC m=+1230.235956489" observedRunningTime="2025-10-06 13:59:02.49132423 +0000 UTC m=+1230.988642767" watchObservedRunningTime="2025-10-06 13:59:02.49134969 +0000 UTC m=+1230.988668227"
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.748614 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 06 13:59:02 crc kubenswrapper[4757]: I1006 13:59:02.748679 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.454922 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.489871 4757 generic.go:334] "Generic (PLEG): container finished" podID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerID="658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727" exitCode=0
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.490746 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.500311 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerDied","Data":"658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727"}
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.500365 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47","Type":"ContainerDied","Data":"6f958aff6164c793b5c614726be15603a6af34e40faf68d041cd55f645e95bdd"}
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.500387 4757 scope.go:117] "RemoveContainer" containerID="ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.553700 4757 scope.go:117] "RemoveContainer" containerID="b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.575196 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-run-httpd\") pod \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") "
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.575314 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rvqj\" (UniqueName: \"kubernetes.io/projected/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-kube-api-access-4rvqj\") pod \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") "
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.575342 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-scripts\") pod \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") "
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.575406 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-sg-core-conf-yaml\") pod \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") "
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.575454 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-log-httpd\") pod \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") "
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.575469 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-combined-ca-bundle\") pod \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") "
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.575543 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-config-data\") pod \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\" (UID: \"ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47\") "
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.575607 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" (UID: "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.575869 4757 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.576525 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" (UID: "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.583493 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-scripts" (OuterVolumeSpecName: "scripts") pod "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" (UID: "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.583643 4757 scope.go:117] "RemoveContainer" containerID="658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.583897 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-kube-api-access-4rvqj" (OuterVolumeSpecName: "kube-api-access-4rvqj") pod "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" (UID: "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47"). InnerVolumeSpecName "kube-api-access-4rvqj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.613912 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" (UID: "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.672482 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" (UID: "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.681605 4757 scope.go:117] "RemoveContainer" containerID="ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.685222 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rvqj\" (UniqueName: \"kubernetes.io/projected/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-kube-api-access-4rvqj\") on node \"crc\" DevicePath \"\""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.685365 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.685453 4757 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.685543 4757 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.685632 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.704379 4757 scope.go:117] "RemoveContainer" containerID="ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c"
Oct 06 13:59:03 crc kubenswrapper[4757]: E1006 13:59:03.704776 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c\": container with ID starting with ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c not found: ID does not exist" containerID="ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.704906 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c"} err="failed to get container status \"ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c\": rpc error: code = NotFound desc = could not find container \"ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c\": container with ID starting with ea2cdab8c1132befe3158a8a0f7e7deff0026d038ebdef647b5c872e4d0bc30c not found: ID does not exist"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.705311 4757 scope.go:117] "RemoveContainer" containerID="b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa"
Oct 06 13:59:03 crc kubenswrapper[4757]: E1006 13:59:03.705632 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa\": container with ID starting with b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa not found: ID does not exist" containerID="b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.705663 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa"} err="failed to get container status \"b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa\": rpc error: code = NotFound desc = could not find container \"b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa\": container with ID starting with b5bc4119c5a857d4b1ba2eb009cf2df1068fde0166eef78dcff4afde617f17fa not found: ID does not exist"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.705685 4757 scope.go:117] "RemoveContainer" containerID="658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727"
Oct 06 13:59:03 crc kubenswrapper[4757]: E1006 13:59:03.705910 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727\": container with ID starting with 658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727 not found: ID does not exist" containerID="658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.705932 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727"} err="failed to get container status \"658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727\": rpc error: code = NotFound desc = could not find container \"658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727\": container with ID starting with 658cb7c51d7f66f97a39766bccc5829bc3886955f1f1889fb60f7608fa3c1727 not found: ID does not exist"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.705950 4757 scope.go:117] "RemoveContainer" containerID="ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220"
Oct 06 13:59:03 crc kubenswrapper[4757]: E1006 13:59:03.706172 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220\": container with ID starting with ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220 not found: ID does not exist" containerID="ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.706196 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220"} err="failed to get container status \"ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220\": rpc error: code = NotFound desc = could not find container \"ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220\": container with ID starting with ac3bad272e5564633193b1c1997f349311b95475590b3bd7e88073bb397a0220 not found: ID does not exist"
Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.710335 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-config-data" (OuterVolumeSpecName: "config-data") pod "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" (UID: "ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.760265 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.760528 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.786851 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.825356 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.836569 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.847268 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:03 crc kubenswrapper[4757]: E1006 13:59:03.847697 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="sg-core" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.847719 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="sg-core" Oct 06 13:59:03 crc kubenswrapper[4757]: E1006 13:59:03.847736 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="proxy-httpd" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.847743 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="proxy-httpd" Oct 06 13:59:03 crc kubenswrapper[4757]: E1006 13:59:03.847773 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="ceilometer-central-agent" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.847781 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="ceilometer-central-agent" Oct 06 13:59:03 crc kubenswrapper[4757]: E1006 13:59:03.847799 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="ceilometer-notification-agent" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.847806 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="ceilometer-notification-agent" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.848027 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="ceilometer-notification-agent" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.848046 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="ceilometer-central-agent" Oct 06 
13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.848060 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="sg-core" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.848081 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" containerName="proxy-httpd" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.851157 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.853238 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.853413 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.853423 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.862683 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.990317 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-scripts\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.990413 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-log-httpd\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.990484 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.990504 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-run-httpd\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.990575 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.990646 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n5cn\" (UniqueName: \"kubernetes.io/projected/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-kube-api-access-5n5cn\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.990693 4757 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-config-data\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:03 crc kubenswrapper[4757]: I1006 13:59:03.990715 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.092110 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.092161 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-run-httpd\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.092183 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.092212 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n5cn\" (UniqueName: \"kubernetes.io/projected/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-kube-api-access-5n5cn\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.092245 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-config-data\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.092266 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.092345 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-scripts\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.092426 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-log-httpd\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 
13:59:04.092881 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-log-httpd\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.093368 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-run-httpd\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.098082 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-config-data\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.098760 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.101563 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.102149 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.106063 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-scripts\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.108257 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n5cn\" (UniqueName: \"kubernetes.io/projected/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-kube-api-access-5n5cn\") pod \"ceilometer-0\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " pod="openstack/ceilometer-0" Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.179328 4757 util.go:30] "No sandbox for pod can be found. 
Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.206202 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47" path="/var/lib/kubelet/pods/ab4e3bc3-86a8-46cf-a54a-c0180d8dfe47/volumes"
Oct 06 13:59:04 crc kubenswrapper[4757]: I1006 13:59:04.672281 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 06 13:59:05 crc kubenswrapper[4757]: I1006 13:59:05.510804 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerStarted","Data":"4218c1ced5dfbb639dfe8deca37c3e72a0d75c9146d563e4d22d017f28a4f141"}
Oct 06 13:59:06 crc kubenswrapper[4757]: I1006 13:59:06.521074 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerStarted","Data":"17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7"}
Oct 06 13:59:06 crc kubenswrapper[4757]: I1006 13:59:06.521655 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerStarted","Data":"ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68"}
Oct 06 13:59:06 crc kubenswrapper[4757]: I1006 13:59:06.881028 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 06 13:59:06 crc kubenswrapper[4757]: I1006 13:59:06.908066 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 06 13:59:07 crc kubenswrapper[4757]: I1006 13:59:07.088719 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 06 13:59:07 crc kubenswrapper[4757]: I1006 13:59:07.090888 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 06 13:59:07 crc kubenswrapper[4757]: I1006 13:59:07.533695 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerStarted","Data":"b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64"}
Oct 06 13:59:07 crc kubenswrapper[4757]: I1006 13:59:07.574821 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 06 13:59:08 crc kubenswrapper[4757]: I1006 13:59:08.171279 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 06 13:59:08 crc kubenswrapper[4757]: I1006 13:59:08.171293 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.194:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 06 13:59:09 crc kubenswrapper[4757]: I1006 13:59:09.553866 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerStarted","Data":"e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93"}
Oct 06 13:59:09 crc kubenswrapper[4757]: I1006 13:59:09.554317 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Oct 06 13:59:09 crc kubenswrapper[4757]: I1006 13:59:09.573066 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.158127947 podStartE2EDuration="6.573046142s" podCreationTimestamp="2025-10-06 13:59:03 +0000 UTC" firstStartedPulling="2025-10-06 13:59:04.676759757 +0000 UTC m=+1233.174078294" lastFinishedPulling="2025-10-06 13:59:09.091677952 +0000 UTC m=+1237.588996489" observedRunningTime="2025-10-06 13:59:09.571138061 +0000 UTC m=+1238.068456608" watchObservedRunningTime="2025-10-06 13:59:09.573046142 +0000 UTC m=+1238.070364679"
Oct 06 13:59:10 crc kubenswrapper[4757]: I1006 13:59:10.856252 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 06 13:59:12 crc kubenswrapper[4757]: I1006 13:59:12.754888 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 06 13:59:12 crc kubenswrapper[4757]: I1006 13:59:12.756545 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Oct 06 13:59:12 crc kubenswrapper[4757]: I1006 13:59:12.761719 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 06 13:59:13 crc kubenswrapper[4757]: I1006 13:59:13.602708 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.600020 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.602800 4757 generic.go:334] "Generic (PLEG): container finished" podID="47ec18a3-c53e-48e4-aae7-f32b19af25a8" containerID="0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306" exitCode=137
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.602867 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.602910 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"47ec18a3-c53e-48e4-aae7-f32b19af25a8","Type":"ContainerDied","Data":"0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306"}
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.602935 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"47ec18a3-c53e-48e4-aae7-f32b19af25a8","Type":"ContainerDied","Data":"2aca52d067d905539471d3db63d830734934208b4c08fcdab8bb89b2a73abcc6"}
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.602950 4757 scope.go:117] "RemoveContainer" containerID="0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.635584 4757 scope.go:117] "RemoveContainer" containerID="0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306"
Oct 06 13:59:14 crc kubenswrapper[4757]: E1006 13:59:14.636307 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306\": container with ID starting with 0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306 not found: ID does not exist" containerID="0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.636345 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306"} err="failed to get container status \"0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306\": rpc error: code = NotFound desc = could not find container \"0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306\": container with ID starting with 0e04c1a2f75660ed2a2220835059f183e347a34be89e740317c9ac93195c0306 not found: ID does not exist"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.727730 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2l565\" (UniqueName: \"kubernetes.io/projected/47ec18a3-c53e-48e4-aae7-f32b19af25a8-kube-api-access-2l565\") pod \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") "
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.727897 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-config-data\") pod \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") "
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.728017 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-combined-ca-bundle\") pod \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\" (UID: \"47ec18a3-c53e-48e4-aae7-f32b19af25a8\") "
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.734431 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47ec18a3-c53e-48e4-aae7-f32b19af25a8-kube-api-access-2l565" (OuterVolumeSpecName: "kube-api-access-2l565") pod "47ec18a3-c53e-48e4-aae7-f32b19af25a8" (UID: "47ec18a3-c53e-48e4-aae7-f32b19af25a8"). InnerVolumeSpecName "kube-api-access-2l565". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.764337 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-config-data" (OuterVolumeSpecName: "config-data") pod "47ec18a3-c53e-48e4-aae7-f32b19af25a8" (UID: "47ec18a3-c53e-48e4-aae7-f32b19af25a8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.765227 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47ec18a3-c53e-48e4-aae7-f32b19af25a8" (UID: "47ec18a3-c53e-48e4-aae7-f32b19af25a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.831910 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.831965 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2l565\" (UniqueName: \"kubernetes.io/projected/47ec18a3-c53e-48e4-aae7-f32b19af25a8-kube-api-access-2l565\") on node \"crc\" DevicePath \"\""
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.831977 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47ec18a3-c53e-48e4-aae7-f32b19af25a8-config-data\") on node \"crc\" DevicePath \"\""
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.948302 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.960192 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.985924 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 06 13:59:14 crc kubenswrapper[4757]: E1006 13:59:14.986309 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ec18a3-c53e-48e4-aae7-f32b19af25a8" containerName="nova-cell1-novncproxy-novncproxy"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.986333 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ec18a3-c53e-48e4-aae7-f32b19af25a8" containerName="nova-cell1-novncproxy-novncproxy"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.986547 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="47ec18a3-c53e-48e4-aae7-f32b19af25a8" containerName="nova-cell1-novncproxy-novncproxy"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.987198 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.990053 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.990156 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Oct 06 13:59:14 crc kubenswrapper[4757]: I1006 13:59:14.990636 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.024254 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.035422 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.035458 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.035494 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.035533 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.035790 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jctd6\" (UniqueName: \"kubernetes.io/projected/61d68e86-89f3-4dc6-bb42-7286c789fbba-kube-api-access-jctd6\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.136957 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jctd6\" (UniqueName: \"kubernetes.io/projected/61d68e86-89f3-4dc6-bb42-7286c789fbba-kube-api-access-jctd6\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0"
Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.137042 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") "
pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.137064 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.137082 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.137160 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.141528 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.141661 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.142056 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.149610 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.156118 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jctd6\" (UniqueName: \"kubernetes.io/projected/61d68e86-89f3-4dc6-bb42-7286c789fbba-kube-api-access-jctd6\") pod \"nova-cell1-novncproxy-0\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.308359 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:15 crc kubenswrapper[4757]: I1006 13:59:15.754166 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 06 13:59:16 crc kubenswrapper[4757]: I1006 13:59:16.197921 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47ec18a3-c53e-48e4-aae7-f32b19af25a8" path="/var/lib/kubelet/pods/47ec18a3-c53e-48e4-aae7-f32b19af25a8/volumes" Oct 06 13:59:16 crc kubenswrapper[4757]: I1006 13:59:16.631169 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"61d68e86-89f3-4dc6-bb42-7286c789fbba","Type":"ContainerStarted","Data":"1e20cdc4c0ab7fbe34675da2bd3b70c163eadea2e1127276d5eac3f283996f8c"} Oct 06 13:59:16 crc kubenswrapper[4757]: I1006 13:59:16.631239 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"61d68e86-89f3-4dc6-bb42-7286c789fbba","Type":"ContainerStarted","Data":"5d5fcb749d2b4c58c4bdece98c3ba1ea71d7b4c44b5f0b2ca7ac765c7da66b28"} Oct 06 13:59:16 crc kubenswrapper[4757]: I1006 13:59:16.652743 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.65272406 podStartE2EDuration="2.65272406s" podCreationTimestamp="2025-10-06 13:59:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:59:16.649560799 +0000 UTC m=+1245.146879346" watchObservedRunningTime="2025-10-06 13:59:16.65272406 +0000 UTC m=+1245.150042597" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.094558 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.094652 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.096154 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.096193 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.105484 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.105548 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.342280 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5785d8b947-nsbsm"] Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.349130 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.365845 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5785d8b947-nsbsm"] Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.381727 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-nb\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.381798 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-sb\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.381853 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k555p\" (UniqueName: \"kubernetes.io/projected/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-kube-api-access-k555p\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.381883 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-svc\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.381967 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-swift-storage-0\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.382017 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-config\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.482766 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-sb\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.482834 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k555p\" (UniqueName: \"kubernetes.io/projected/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-kube-api-access-k555p\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.482855 4757 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-svc\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.482922 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-swift-storage-0\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.482961 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-config\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.482992 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-nb\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.484014 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-nb\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.484083 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-sb\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.484784 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-swift-storage-0\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.484836 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-config\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.485465 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-svc\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.506980 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k555p\" (UniqueName: 
\"kubernetes.io/projected/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-kube-api-access-k555p\") pod \"dnsmasq-dns-5785d8b947-nsbsm\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") " pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:17 crc kubenswrapper[4757]: I1006 13:59:17.674532 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:18 crc kubenswrapper[4757]: I1006 13:59:18.168521 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5785d8b947-nsbsm"] Oct 06 13:59:18 crc kubenswrapper[4757]: I1006 13:59:18.649751 4757 generic.go:334] "Generic (PLEG): container finished" podID="f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" containerID="2bb3cf19fc535ce8b640e13e4f2624e106cf69b9da5982d604eed9ab677dace2" exitCode=0 Oct 06 13:59:18 crc kubenswrapper[4757]: I1006 13:59:18.649845 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" event={"ID":"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c","Type":"ContainerDied","Data":"2bb3cf19fc535ce8b640e13e4f2624e106cf69b9da5982d604eed9ab677dace2"} Oct 06 13:59:18 crc kubenswrapper[4757]: I1006 13:59:18.650219 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" event={"ID":"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c","Type":"ContainerStarted","Data":"9ad49b788aaf023754763ccf3c18a79bdea0283d4edaa1a79c20b4d29e7921ef"} Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.164277 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.164583 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="ceilometer-central-agent" containerID="cri-o://ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68" gracePeriod=30 Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.164747 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="ceilometer-notification-agent" containerID="cri-o://17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7" gracePeriod=30 Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.164788 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="sg-core" containerID="cri-o://b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64" gracePeriod=30 Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.165314 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="proxy-httpd" containerID="cri-o://e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93" gracePeriod=30 Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.265811 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.196:3000/\": read tcp 10.217.0.2:51774->10.217.0.196:3000: read: connection reset by peer" Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.660923 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" 
event={"ID":"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c","Type":"ContainerStarted","Data":"96c3bb713c662f2ed04a7bb1393bbac6037596a56af39675d70856aba417eb71"} Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.661991 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.664760 4757 generic.go:334] "Generic (PLEG): container finished" podID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerID="e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93" exitCode=0 Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.664793 4757 generic.go:334] "Generic (PLEG): container finished" podID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerID="b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64" exitCode=2 Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.664803 4757 generic.go:334] "Generic (PLEG): container finished" podID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerID="ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68" exitCode=0 Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.664824 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerDied","Data":"e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93"} Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.664848 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerDied","Data":"b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64"} Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.664863 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerDied","Data":"ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68"} Oct 06 13:59:19 crc kubenswrapper[4757]: I1006 13:59:19.687709 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" podStartSLOduration=2.687691296 podStartE2EDuration="2.687691296s" podCreationTimestamp="2025-10-06 13:59:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:59:19.679954099 +0000 UTC m=+1248.177272646" watchObservedRunningTime="2025-10-06 13:59:19.687691296 +0000 UTC m=+1248.185009833" Oct 06 13:59:20 crc kubenswrapper[4757]: I1006 13:59:20.101258 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:20 crc kubenswrapper[4757]: I1006 13:59:20.102405 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-log" containerID="cri-o://2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda" gracePeriod=30 Oct 06 13:59:20 crc kubenswrapper[4757]: I1006 13:59:20.102530 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-api" containerID="cri-o://464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c" gracePeriod=30 Oct 06 13:59:20 crc kubenswrapper[4757]: I1006 13:59:20.309176 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:20 crc kubenswrapper[4757]: I1006 13:59:20.678241 4757 generic.go:334] "Generic (PLEG): container finished" podID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerID="2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda" exitCode=143 Oct 06 13:59:20 crc kubenswrapper[4757]: I1006 13:59:20.678314 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2eb0fb1-47fb-410c-b154-6171760d857c","Type":"ContainerDied","Data":"2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda"} Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.400315 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.490218 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-scripts\") pod \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.490378 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-config-data\") pod \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.490531 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-combined-ca-bundle\") pod \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.490594 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-sg-core-conf-yaml\") pod \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.490660 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-run-httpd\") pod \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.490827 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-log-httpd\") pod \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.490853 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-ceilometer-tls-certs\") pod \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\" (UID: \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.490918 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5n5cn\" (UniqueName: \"kubernetes.io/projected/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-kube-api-access-5n5cn\") pod \"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\" (UID: 
\"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.491467 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" (UID: "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.492240 4757 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.493427 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" (UID: "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.509153 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-kube-api-access-5n5cn" (OuterVolumeSpecName: "kube-api-access-5n5cn") pod "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" (UID: "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5"). InnerVolumeSpecName "kube-api-access-5n5cn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.513661 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-scripts" (OuterVolumeSpecName: "scripts") pod "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" (UID: "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.520623 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" (UID: "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.572772 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" (UID: "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.593155 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" (UID: "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.594065 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.594223 4757 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.594244 4757 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.594255 4757 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.594267 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5n5cn\" (UniqueName: \"kubernetes.io/projected/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-kube-api-access-5n5cn\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.594283 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.619770 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-config-data" (OuterVolumeSpecName: "config-data") pod "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" (UID: "df05d43b-220d-4ccc-98b1-8c3f01cdd9f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.669273 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.695643 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.714536 4757 generic.go:334] "Generic (PLEG): container finished" podID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerID="17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7" exitCode=0 Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.714594 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerDied","Data":"17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7"} Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.714621 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"df05d43b-220d-4ccc-98b1-8c3f01cdd9f5","Type":"ContainerDied","Data":"4218c1ced5dfbb639dfe8deca37c3e72a0d75c9146d563e4d22d017f28a4f141"} Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.714639 4757 scope.go:117] "RemoveContainer" containerID="e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.714757 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.724936 4757 generic.go:334] "Generic (PLEG): container finished" podID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerID="464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c" exitCode=0 Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.724984 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2eb0fb1-47fb-410c-b154-6171760d857c","Type":"ContainerDied","Data":"464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c"} Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.725014 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a2eb0fb1-47fb-410c-b154-6171760d857c","Type":"ContainerDied","Data":"0bbdbef559fc5f7133f4e9e6dd1eb647165766c25f3ca0910dd38ad62521cc43"} Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.725080 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.758977 4757 scope.go:117] "RemoveContainer" containerID="b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.759886 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.768620 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.788137 4757 scope.go:117] "RemoveContainer" containerID="17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.790512 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.790845 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-log" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.790861 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-log" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.790873 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="sg-core" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.790879 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="sg-core" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.790893 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-api" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.790900 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-api" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.790941 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="proxy-httpd" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.790947 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="proxy-httpd" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.790958 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="ceilometer-notification-agent" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.790965 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="ceilometer-notification-agent" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.790974 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="ceilometer-central-agent" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.790981 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="ceilometer-central-agent" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.791259 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="ceilometer-notification-agent" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.791288 4757 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="proxy-httpd" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.791307 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="sg-core" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.791320 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-api" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.791326 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" containerName="nova-api-log" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.791334 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" containerName="ceilometer-central-agent" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.792851 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.797663 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.797753 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-combined-ca-bundle\") pod \"a2eb0fb1-47fb-410c-b154-6171760d857c\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.797854 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.797879 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2eb0fb1-47fb-410c-b154-6171760d857c-logs\") pod \"a2eb0fb1-47fb-410c-b154-6171760d857c\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.797935 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pncr6\" (UniqueName: \"kubernetes.io/projected/a2eb0fb1-47fb-410c-b154-6171760d857c-kube-api-access-pncr6\") pod \"a2eb0fb1-47fb-410c-b154-6171760d857c\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.797982 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-config-data\") pod \"a2eb0fb1-47fb-410c-b154-6171760d857c\" (UID: \"a2eb0fb1-47fb-410c-b154-6171760d857c\") " Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.799828 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.801219 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2eb0fb1-47fb-410c-b154-6171760d857c-logs" (OuterVolumeSpecName: "logs") pod "a2eb0fb1-47fb-410c-b154-6171760d857c" (UID: "a2eb0fb1-47fb-410c-b154-6171760d857c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.805256 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2eb0fb1-47fb-410c-b154-6171760d857c-kube-api-access-pncr6" (OuterVolumeSpecName: "kube-api-access-pncr6") pod "a2eb0fb1-47fb-410c-b154-6171760d857c" (UID: "a2eb0fb1-47fb-410c-b154-6171760d857c"). InnerVolumeSpecName "kube-api-access-pncr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.810911 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.827815 4757 scope.go:117] "RemoveContainer" containerID="ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.841828 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-config-data" (OuterVolumeSpecName: "config-data") pod "a2eb0fb1-47fb-410c-b154-6171760d857c" (UID: "a2eb0fb1-47fb-410c-b154-6171760d857c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.866191 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2eb0fb1-47fb-410c-b154-6171760d857c" (UID: "a2eb0fb1-47fb-410c-b154-6171760d857c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.873335 4757 scope.go:117] "RemoveContainer" containerID="e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.877299 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93\": container with ID starting with e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93 not found: ID does not exist" containerID="e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.878155 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93"} err="failed to get container status \"e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93\": rpc error: code = NotFound desc = could not find container \"e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93\": container with ID starting with e2aaa0bb7f72a7b437fa5c60bf5f8a78cebb077d154e9665894b5571457c3c93 not found: ID does not exist" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.878209 4757 scope.go:117] "RemoveContainer" containerID="b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.878631 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64\": container with ID starting with b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64 not found: ID does not exist" 
containerID="b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.878751 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64"} err="failed to get container status \"b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64\": rpc error: code = NotFound desc = could not find container \"b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64\": container with ID starting with b4cf5decd817918515bd69733790bcf20071bb1550b2f51473be98e7b6ff0b64 not found: ID does not exist" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.878884 4757 scope.go:117] "RemoveContainer" containerID="17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.879231 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7\": container with ID starting with 17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7 not found: ID does not exist" containerID="17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.879255 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7"} err="failed to get container status \"17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7\": rpc error: code = NotFound desc = could not find container \"17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7\": container with ID starting with 17911343114f9553ec28f6b524a189fb1c758a3671578f2e794d174cf02a32b7 not found: ID does not exist" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.879274 4757 scope.go:117] "RemoveContainer" containerID="ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.882318 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68\": container with ID starting with ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68 not found: ID does not exist" containerID="ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.882435 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68"} err="failed to get container status \"ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68\": rpc error: code = NotFound desc = could not find container \"ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68\": container with ID starting with ad11ddf01a2aa52555d0fd010110740a7ac7199f08e49ffe5be2a2226e969a68 not found: ID does not exist" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.882506 4757 scope.go:117] "RemoveContainer" containerID="464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.901832 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.901897 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-run-httpd\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.901921 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-log-httpd\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.901952 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.902021 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-config-data\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.902055 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-scripts\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.902068 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.902183 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqlm9\" (UniqueName: \"kubernetes.io/projected/542090f5-d2d8-4f78-b566-10e9885c341e-kube-api-access-fqlm9\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.902238 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.902252 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a2eb0fb1-47fb-410c-b154-6171760d857c-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.902261 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pncr6\" (UniqueName: 
\"kubernetes.io/projected/a2eb0fb1-47fb-410c-b154-6171760d857c-kube-api-access-pncr6\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.902272 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2eb0fb1-47fb-410c-b154-6171760d857c-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.917156 4757 scope.go:117] "RemoveContainer" containerID="2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.936209 4757 scope.go:117] "RemoveContainer" containerID="464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.936638 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c\": container with ID starting with 464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c not found: ID does not exist" containerID="464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.936675 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c"} err="failed to get container status \"464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c\": rpc error: code = NotFound desc = could not find container \"464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c\": container with ID starting with 464f214dd793a85d11d749f8ee2265047b170992e7df44c39caeffc22a8d283c not found: ID does not exist" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.936703 4757 scope.go:117] "RemoveContainer" containerID="2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda" Oct 06 13:59:23 crc kubenswrapper[4757]: E1006 13:59:23.937044 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda\": container with ID starting with 2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda not found: ID does not exist" containerID="2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda" Oct 06 13:59:23 crc kubenswrapper[4757]: I1006 13:59:23.937077 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda"} err="failed to get container status \"2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda\": rpc error: code = NotFound desc = could not find container \"2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda\": container with ID starting with 2a5a23205a19ac1c4c50785b8b3723f68d147f42144ebb9895c80ad789e5eeda not found: ID does not exist" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.003684 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-scripts\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.003733 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.003865 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqlm9\" (UniqueName: \"kubernetes.io/projected/542090f5-d2d8-4f78-b566-10e9885c341e-kube-api-access-fqlm9\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.003901 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.003947 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-run-httpd\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.003974 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-log-httpd\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.004012 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.004060 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-config-data\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.004550 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-run-httpd\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.004775 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-log-httpd\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.007515 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-scripts\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.007943 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.008210 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-config-data\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.008664 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.017813 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.021142 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqlm9\" (UniqueName: \"kubernetes.io/projected/542090f5-d2d8-4f78-b566-10e9885c341e-kube-api-access-fqlm9\") pod \"ceilometer-0\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.112563 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.122644 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.128903 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.145227 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.147455 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.150305 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.150315 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.150567 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.161240 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.195575 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2eb0fb1-47fb-410c-b154-6171760d857c" path="/var/lib/kubelet/pods/a2eb0fb1-47fb-410c-b154-6171760d857c/volumes" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.196386 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df05d43b-220d-4ccc-98b1-8c3f01cdd9f5" path="/var/lib/kubelet/pods/df05d43b-220d-4ccc-98b1-8c3f01cdd9f5/volumes" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.311535 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-public-tls-certs\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.311963 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-logs\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.312027 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.312072 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkkqr\" (UniqueName: \"kubernetes.io/projected/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-kube-api-access-xkkqr\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.312168 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-config-data\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.313079 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.415373 4757 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-logs\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.415433 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.415460 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkkqr\" (UniqueName: \"kubernetes.io/projected/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-kube-api-access-xkkqr\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.415518 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-config-data\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.415599 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.415650 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-public-tls-certs\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.416624 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-logs\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.424911 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-config-data\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.435743 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkkqr\" (UniqueName: \"kubernetes.io/projected/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-kube-api-access-xkkqr\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.435943 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.436536 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-public-tls-certs\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.444069 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-internal-tls-certs\") pod \"nova-api-0\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.472303 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.590385 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 06 13:59:24 crc kubenswrapper[4757]: W1006 13:59:24.598063 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod542090f5_d2d8_4f78_b566_10e9885c341e.slice/crio-49aea83301219707ca62b9e2d031d5d0032f9e74b8c6e1b344c23c65ee0d1fc3 WatchSource:0}: Error finding container 49aea83301219707ca62b9e2d031d5d0032f9e74b8c6e1b344c23c65ee0d1fc3: Status 404 returned error can't find the container with id 49aea83301219707ca62b9e2d031d5d0032f9e74b8c6e1b344c23c65ee0d1fc3 Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.733767 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerStarted","Data":"49aea83301219707ca62b9e2d031d5d0032f9e74b8c6e1b344c23c65ee0d1fc3"} Oct 06 13:59:24 crc kubenswrapper[4757]: I1006 13:59:24.915130 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:25 crc kubenswrapper[4757]: I1006 13:59:25.309009 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:25 crc kubenswrapper[4757]: I1006 13:59:25.341856 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 06 13:59:25 crc kubenswrapper[4757]: I1006 13:59:25.746862 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e55a2d6b-bbfd-4e55-8d60-352b8de971ca","Type":"ContainerStarted","Data":"f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa"} Oct 06 13:59:25 crc kubenswrapper[4757]: I1006 13:59:25.746909 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e55a2d6b-bbfd-4e55-8d60-352b8de971ca","Type":"ContainerStarted","Data":"f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1"} Oct 06 13:59:25 crc kubenswrapper[4757]: I1006 13:59:25.746923 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e55a2d6b-bbfd-4e55-8d60-352b8de971ca","Type":"ContainerStarted","Data":"79c592135749f4f5c2de5130aa626665d4535082cdcb9414be0cfca121352781"} Oct 06 13:59:25 crc kubenswrapper[4757]: I1006 13:59:25.748770 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerStarted","Data":"d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb"} Oct 06 13:59:25 crc kubenswrapper[4757]: I1006 13:59:25.767257 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 
06 13:59:25 crc kubenswrapper[4757]: I1006 13:59:25.814114 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.814067306 podStartE2EDuration="1.814067306s" podCreationTimestamp="2025-10-06 13:59:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:59:25.787438515 +0000 UTC m=+1254.284757082" watchObservedRunningTime="2025-10-06 13:59:25.814067306 +0000 UTC m=+1254.311385853" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.062814 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-vsmtw"] Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.064350 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.066511 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.069807 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.080501 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-vsmtw"] Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.146305 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.146690 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlxz2\" (UniqueName: \"kubernetes.io/projected/f9c83c06-5d47-4561-aa37-2376cc54401d-kube-api-access-xlxz2\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.146746 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-config-data\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.146771 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-scripts\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.249703 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.249849 4757 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-xlxz2\" (UniqueName: \"kubernetes.io/projected/f9c83c06-5d47-4561-aa37-2376cc54401d-kube-api-access-xlxz2\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.249911 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-config-data\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.249949 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-scripts\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.254539 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-scripts\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.254994 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.255291 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-config-data\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.275661 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlxz2\" (UniqueName: \"kubernetes.io/projected/f9c83c06-5d47-4561-aa37-2376cc54401d-kube-api-access-xlxz2\") pod \"nova-cell1-cell-mapping-vsmtw\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:26 crc kubenswrapper[4757]: I1006 13:59:26.380174 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:27 crc kubenswrapper[4757]: I1006 13:59:26.763309 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerStarted","Data":"d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9"} Oct 06 13:59:27 crc kubenswrapper[4757]: I1006 13:59:26.763945 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerStarted","Data":"bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66"} Oct 06 13:59:27 crc kubenswrapper[4757]: W1006 13:59:26.898160 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9c83c06_5d47_4561_aa37_2376cc54401d.slice/crio-6cc6bb15e173d2ae9317e68d268053741e5b6eb4c70b22ce20bb3072d359c9cb WatchSource:0}: Error finding container 6cc6bb15e173d2ae9317e68d268053741e5b6eb4c70b22ce20bb3072d359c9cb: Status 404 returned error can't find the container with id 6cc6bb15e173d2ae9317e68d268053741e5b6eb4c70b22ce20bb3072d359c9cb Oct 06 13:59:27 crc kubenswrapper[4757]: I1006 13:59:26.906242 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-vsmtw"] Oct 06 13:59:27 crc kubenswrapper[4757]: I1006 13:59:27.676448 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 13:59:27 crc kubenswrapper[4757]: I1006 13:59:27.764271 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bc686f95-5b58c"] Oct 06 13:59:27 crc kubenswrapper[4757]: I1006 13:59:27.764544 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" podUID="c5749667-46f8-48cc-b934-772352cf8bf8" containerName="dnsmasq-dns" containerID="cri-o://b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460" gracePeriod=10 Oct 06 13:59:27 crc kubenswrapper[4757]: I1006 13:59:27.780869 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vsmtw" event={"ID":"f9c83c06-5d47-4561-aa37-2376cc54401d","Type":"ContainerStarted","Data":"f43c9069102422c3978d089df3df7fb465e8c5a8088261091ed94d97c7fca5a0"} Oct 06 13:59:27 crc kubenswrapper[4757]: I1006 13:59:27.780911 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vsmtw" event={"ID":"f9c83c06-5d47-4561-aa37-2376cc54401d","Type":"ContainerStarted","Data":"6cc6bb15e173d2ae9317e68d268053741e5b6eb4c70b22ce20bb3072d359c9cb"} Oct 06 13:59:27 crc kubenswrapper[4757]: I1006 13:59:27.812390 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-vsmtw" podStartSLOduration=1.8123739269999999 podStartE2EDuration="1.812373927s" podCreationTimestamp="2025-10-06 13:59:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:59:27.811151498 +0000 UTC m=+1256.308470065" watchObservedRunningTime="2025-10-06 13:59:27.812373927 +0000 UTC m=+1256.309692464" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.317974 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.394533 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz4vz\" (UniqueName: \"kubernetes.io/projected/c5749667-46f8-48cc-b934-772352cf8bf8-kube-api-access-fz4vz\") pod \"c5749667-46f8-48cc-b934-772352cf8bf8\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.394595 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-sb\") pod \"c5749667-46f8-48cc-b934-772352cf8bf8\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.394655 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-config\") pod \"c5749667-46f8-48cc-b934-772352cf8bf8\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.394701 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-svc\") pod \"c5749667-46f8-48cc-b934-772352cf8bf8\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.394795 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-nb\") pod \"c5749667-46f8-48cc-b934-772352cf8bf8\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.394840 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-swift-storage-0\") pod \"c5749667-46f8-48cc-b934-772352cf8bf8\" (UID: \"c5749667-46f8-48cc-b934-772352cf8bf8\") " Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.400847 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5749667-46f8-48cc-b934-772352cf8bf8-kube-api-access-fz4vz" (OuterVolumeSpecName: "kube-api-access-fz4vz") pod "c5749667-46f8-48cc-b934-772352cf8bf8" (UID: "c5749667-46f8-48cc-b934-772352cf8bf8"). InnerVolumeSpecName "kube-api-access-fz4vz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.447349 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-config" (OuterVolumeSpecName: "config") pod "c5749667-46f8-48cc-b934-772352cf8bf8" (UID: "c5749667-46f8-48cc-b934-772352cf8bf8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.449770 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c5749667-46f8-48cc-b934-772352cf8bf8" (UID: "c5749667-46f8-48cc-b934-772352cf8bf8"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.456487 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c5749667-46f8-48cc-b934-772352cf8bf8" (UID: "c5749667-46f8-48cc-b934-772352cf8bf8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.456531 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c5749667-46f8-48cc-b934-772352cf8bf8" (UID: "c5749667-46f8-48cc-b934-772352cf8bf8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.468718 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c5749667-46f8-48cc-b934-772352cf8bf8" (UID: "c5749667-46f8-48cc-b934-772352cf8bf8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.496833 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.496873 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-config\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.496884 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.496895 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.496907 4757 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c5749667-46f8-48cc-b934-772352cf8bf8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.496919 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz4vz\" (UniqueName: \"kubernetes.io/projected/c5749667-46f8-48cc-b934-772352cf8bf8-kube-api-access-fz4vz\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.794234 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerStarted","Data":"85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080"} Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.794466 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.796577 4757 generic.go:334] "Generic (PLEG): container 
finished" podID="c5749667-46f8-48cc-b934-772352cf8bf8" containerID="b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460" exitCode=0 Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.796659 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" event={"ID":"c5749667-46f8-48cc-b934-772352cf8bf8","Type":"ContainerDied","Data":"b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460"} Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.796690 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" event={"ID":"c5749667-46f8-48cc-b934-772352cf8bf8","Type":"ContainerDied","Data":"ba996403da0d919881e318aa6c8a3713413499e1caa7eaf03f3d9758abdaf492"} Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.796710 4757 scope.go:117] "RemoveContainer" containerID="b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.796905 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86bc686f95-5b58c" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.829272 4757 scope.go:117] "RemoveContainer" containerID="6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.834436 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.390726202 podStartE2EDuration="5.834415095s" podCreationTimestamp="2025-10-06 13:59:23 +0000 UTC" firstStartedPulling="2025-10-06 13:59:24.601082659 +0000 UTC m=+1253.098401196" lastFinishedPulling="2025-10-06 13:59:28.044771552 +0000 UTC m=+1256.542090089" observedRunningTime="2025-10-06 13:59:28.829647443 +0000 UTC m=+1257.326965980" watchObservedRunningTime="2025-10-06 13:59:28.834415095 +0000 UTC m=+1257.331733632" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.866270 4757 scope.go:117] "RemoveContainer" containerID="b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460" Oct 06 13:59:28 crc kubenswrapper[4757]: E1006 13:59:28.870237 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460\": container with ID starting with b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460 not found: ID does not exist" containerID="b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.870276 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460"} err="failed to get container status \"b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460\": rpc error: code = NotFound desc = could not find container \"b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460\": container with ID starting with b292c2f5c2a63c5dba9bae42fd7e9d0f6f92e7620da7b00639636430e7ae9460 not found: ID does not exist" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.870303 4757 scope.go:117] "RemoveContainer" containerID="6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f" Oct 06 13:59:28 crc kubenswrapper[4757]: E1006 13:59:28.877233 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f\": container with ID starting with 6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f not found: ID does not exist" containerID="6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.877276 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f"} err="failed to get container status \"6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f\": rpc error: code = NotFound desc = could not find container \"6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f\": container with ID starting with 6de0306454b4cc8935eca0d925d6b5d1b637f60c8d4a7c27c8958a30520dfb2f not found: ID does not exist" Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.898156 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86bc686f95-5b58c"] Oct 06 13:59:28 crc kubenswrapper[4757]: I1006 13:59:28.915206 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86bc686f95-5b58c"] Oct 06 13:59:30 crc kubenswrapper[4757]: I1006 13:59:30.199594 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5749667-46f8-48cc-b934-772352cf8bf8" path="/var/lib/kubelet/pods/c5749667-46f8-48cc-b934-772352cf8bf8/volumes" Oct 06 13:59:31 crc kubenswrapper[4757]: I1006 13:59:31.834206 4757 generic.go:334] "Generic (PLEG): container finished" podID="f9c83c06-5d47-4561-aa37-2376cc54401d" containerID="f43c9069102422c3978d089df3df7fb465e8c5a8088261091ed94d97c7fca5a0" exitCode=0 Oct 06 13:59:31 crc kubenswrapper[4757]: I1006 13:59:31.834308 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vsmtw" event={"ID":"f9c83c06-5d47-4561-aa37-2376cc54401d","Type":"ContainerDied","Data":"f43c9069102422c3978d089df3df7fb465e8c5a8088261091ed94d97c7fca5a0"} Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.146696 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.290820 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-scripts\") pod \"f9c83c06-5d47-4561-aa37-2376cc54401d\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.290895 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xlxz2\" (UniqueName: \"kubernetes.io/projected/f9c83c06-5d47-4561-aa37-2376cc54401d-kube-api-access-xlxz2\") pod \"f9c83c06-5d47-4561-aa37-2376cc54401d\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.291064 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-config-data\") pod \"f9c83c06-5d47-4561-aa37-2376cc54401d\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.291127 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-combined-ca-bundle\") pod \"f9c83c06-5d47-4561-aa37-2376cc54401d\" (UID: \"f9c83c06-5d47-4561-aa37-2376cc54401d\") " Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.297829 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-scripts" (OuterVolumeSpecName: "scripts") pod "f9c83c06-5d47-4561-aa37-2376cc54401d" (UID: "f9c83c06-5d47-4561-aa37-2376cc54401d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.299462 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9c83c06-5d47-4561-aa37-2376cc54401d-kube-api-access-xlxz2" (OuterVolumeSpecName: "kube-api-access-xlxz2") pod "f9c83c06-5d47-4561-aa37-2376cc54401d" (UID: "f9c83c06-5d47-4561-aa37-2376cc54401d"). InnerVolumeSpecName "kube-api-access-xlxz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.328245 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-config-data" (OuterVolumeSpecName: "config-data") pod "f9c83c06-5d47-4561-aa37-2376cc54401d" (UID: "f9c83c06-5d47-4561-aa37-2376cc54401d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.331824 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9c83c06-5d47-4561-aa37-2376cc54401d" (UID: "f9c83c06-5d47-4561-aa37-2376cc54401d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.393808 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.393833 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.393842 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9c83c06-5d47-4561-aa37-2376cc54401d-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.393850 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xlxz2\" (UniqueName: \"kubernetes.io/projected/f9c83c06-5d47-4561-aa37-2376cc54401d-kube-api-access-xlxz2\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.862254 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-vsmtw" event={"ID":"f9c83c06-5d47-4561-aa37-2376cc54401d","Type":"ContainerDied","Data":"6cc6bb15e173d2ae9317e68d268053741e5b6eb4c70b22ce20bb3072d359c9cb"} Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.862308 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cc6bb15e173d2ae9317e68d268053741e5b6eb4c70b22ce20bb3072d359c9cb" Oct 06 13:59:33 crc kubenswrapper[4757]: I1006 13:59:33.862339 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-vsmtw" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.085912 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.086355 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd" containerName="nova-scheduler-scheduler" containerID="cri-o://cc57a918f77c5a7708d90b3d81c02a594ecb2bbeab6536376c5a5cbb9a9fb91a" gracePeriod=30 Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.099484 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.099729 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerName="nova-api-log" containerID="cri-o://f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1" gracePeriod=30 Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.099852 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerName="nova-api-api" containerID="cri-o://f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa" gracePeriod=30 Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.142668 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.142893 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" 
containerName="nova-metadata-log" containerID="cri-o://8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27" gracePeriod=30 Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.143018 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" containerName="nova-metadata-metadata" containerID="cri-o://801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a" gracePeriod=30 Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.680356 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.819252 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkkqr\" (UniqueName: \"kubernetes.io/projected/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-kube-api-access-xkkqr\") pod \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.819351 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-combined-ca-bundle\") pod \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.819490 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-config-data\") pod \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.819536 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-logs\") pod \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.819573 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-internal-tls-certs\") pod \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.819610 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-public-tls-certs\") pod \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\" (UID: \"e55a2d6b-bbfd-4e55-8d60-352b8de971ca\") " Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.820375 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-logs" (OuterVolumeSpecName: "logs") pod "e55a2d6b-bbfd-4e55-8d60-352b8de971ca" (UID: "e55a2d6b-bbfd-4e55-8d60-352b8de971ca"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.825023 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-kube-api-access-xkkqr" (OuterVolumeSpecName: "kube-api-access-xkkqr") pod "e55a2d6b-bbfd-4e55-8d60-352b8de971ca" (UID: "e55a2d6b-bbfd-4e55-8d60-352b8de971ca"). InnerVolumeSpecName "kube-api-access-xkkqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.849991 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e55a2d6b-bbfd-4e55-8d60-352b8de971ca" (UID: "e55a2d6b-bbfd-4e55-8d60-352b8de971ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.858529 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-config-data" (OuterVolumeSpecName: "config-data") pod "e55a2d6b-bbfd-4e55-8d60-352b8de971ca" (UID: "e55a2d6b-bbfd-4e55-8d60-352b8de971ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.880574 4757 generic.go:334] "Generic (PLEG): container finished" podID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerID="f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa" exitCode=0 Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.880630 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.880648 4757 generic.go:334] "Generic (PLEG): container finished" podID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerID="f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1" exitCode=143 Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.880757 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e55a2d6b-bbfd-4e55-8d60-352b8de971ca","Type":"ContainerDied","Data":"f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa"} Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.880811 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e55a2d6b-bbfd-4e55-8d60-352b8de971ca","Type":"ContainerDied","Data":"f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1"} Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.880839 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e55a2d6b-bbfd-4e55-8d60-352b8de971ca","Type":"ContainerDied","Data":"79c592135749f4f5c2de5130aa626665d4535082cdcb9414be0cfca121352781"} Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.880873 4757 scope.go:117] "RemoveContainer" containerID="f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.885979 4757 generic.go:334] "Generic (PLEG): container finished" podID="3dac5395-f6e7-4731-af9c-813f1863e380" containerID="8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27" exitCode=143 Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.886033 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"3dac5395-f6e7-4731-af9c-813f1863e380","Type":"ContainerDied","Data":"8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27"} Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.900855 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e55a2d6b-bbfd-4e55-8d60-352b8de971ca" (UID: "e55a2d6b-bbfd-4e55-8d60-352b8de971ca"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.908265 4757 scope.go:117] "RemoveContainer" containerID="f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.915504 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e55a2d6b-bbfd-4e55-8d60-352b8de971ca" (UID: "e55a2d6b-bbfd-4e55-8d60-352b8de971ca"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.922386 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.922436 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.922445 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.922455 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.922464 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkkqr\" (UniqueName: \"kubernetes.io/projected/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-kube-api-access-xkkqr\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.922472 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e55a2d6b-bbfd-4e55-8d60-352b8de971ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.928150 4757 scope.go:117] "RemoveContainer" containerID="f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa" Oct 06 13:59:34 crc kubenswrapper[4757]: E1006 13:59:34.928685 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa\": container with ID starting with f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa not found: ID does not exist" containerID="f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.928725 4757 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa"} err="failed to get container status \"f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa\": rpc error: code = NotFound desc = could not find container \"f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa\": container with ID starting with f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa not found: ID does not exist" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.928747 4757 scope.go:117] "RemoveContainer" containerID="f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1" Oct 06 13:59:34 crc kubenswrapper[4757]: E1006 13:59:34.929339 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1\": container with ID starting with f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1 not found: ID does not exist" containerID="f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.929367 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1"} err="failed to get container status \"f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1\": rpc error: code = NotFound desc = could not find container \"f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1\": container with ID starting with f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1 not found: ID does not exist" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.929382 4757 scope.go:117] "RemoveContainer" containerID="f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.929598 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa"} err="failed to get container status \"f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa\": rpc error: code = NotFound desc = could not find container \"f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa\": container with ID starting with f98e70dd466b2783fb448f06201dc355f75e95eebe10594d844aa9036ef81bfa not found: ID does not exist" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.929629 4757 scope.go:117] "RemoveContainer" containerID="f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1" Oct 06 13:59:34 crc kubenswrapper[4757]: I1006 13:59:34.929939 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1"} err="failed to get container status \"f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1\": rpc error: code = NotFound desc = could not find container \"f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1\": container with ID starting with f3ab1b9b43bafe71dfeb86b04099d31e61c1c622d97b1cd4902dcceb8f6a7bb1 not found: ID does not exist" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.212475 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.222822 4757 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.250178 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:35 crc kubenswrapper[4757]: E1006 13:59:35.250641 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5749667-46f8-48cc-b934-772352cf8bf8" containerName="dnsmasq-dns" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.250666 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5749667-46f8-48cc-b934-772352cf8bf8" containerName="dnsmasq-dns" Oct 06 13:59:35 crc kubenswrapper[4757]: E1006 13:59:35.250695 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c83c06-5d47-4561-aa37-2376cc54401d" containerName="nova-manage" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.250702 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c83c06-5d47-4561-aa37-2376cc54401d" containerName="nova-manage" Oct 06 13:59:35 crc kubenswrapper[4757]: E1006 13:59:35.250726 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerName="nova-api-api" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.250734 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerName="nova-api-api" Oct 06 13:59:35 crc kubenswrapper[4757]: E1006 13:59:35.250750 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5749667-46f8-48cc-b934-772352cf8bf8" containerName="init" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.250757 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5749667-46f8-48cc-b934-772352cf8bf8" containerName="init" Oct 06 13:59:35 crc kubenswrapper[4757]: E1006 13:59:35.250765 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerName="nova-api-log" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.250772 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerName="nova-api-log" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.250987 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerName="nova-api-api" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.251007 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" containerName="nova-api-log" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.251020 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5749667-46f8-48cc-b934-772352cf8bf8" containerName="dnsmasq-dns" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.251045 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9c83c06-5d47-4561-aa37-2376cc54401d" containerName="nova-manage" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.255301 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.259928 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.260804 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.260970 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.276709 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.330117 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-public-tls-certs\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.330213 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-config-data\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.330289 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.330321 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9mfm\" (UniqueName: \"kubernetes.io/projected/c84ddadb-263d-4a4a-bc3f-b645c449e392-kube-api-access-w9mfm\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.330379 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.330519 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c84ddadb-263d-4a4a-bc3f-b645c449e392-logs\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.432547 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.432622 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c84ddadb-263d-4a4a-bc3f-b645c449e392-logs\") pod \"nova-api-0\" (UID: 
\"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.432744 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-public-tls-certs\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.432786 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-config-data\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.432855 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.432883 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9mfm\" (UniqueName: \"kubernetes.io/projected/c84ddadb-263d-4a4a-bc3f-b645c449e392-kube-api-access-w9mfm\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.433184 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c84ddadb-263d-4a4a-bc3f-b645c449e392-logs\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.438789 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-public-tls-certs\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.438984 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-config-data\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.439709 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.451204 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9mfm\" (UniqueName: \"kubernetes.io/projected/c84ddadb-263d-4a4a-bc3f-b645c449e392-kube-api-access-w9mfm\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.452179 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " 
pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.596701 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.902524 4757 generic.go:334] "Generic (PLEG): container finished" podID="acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd" containerID="cc57a918f77c5a7708d90b3d81c02a594ecb2bbeab6536376c5a5cbb9a9fb91a" exitCode=0 Oct 06 13:59:35 crc kubenswrapper[4757]: I1006 13:59:35.902635 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd","Type":"ContainerDied","Data":"cc57a918f77c5a7708d90b3d81c02a594ecb2bbeab6536376c5a5cbb9a9fb91a"} Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.066851 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.078306 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 06 13:59:36 crc kubenswrapper[4757]: W1006 13:59:36.091935 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc84ddadb_263d_4a4a_bc3f_b645c449e392.slice/crio-6bf5e94750ff6b0139f3e6e340904b34fbedc17adfa35a34ca143177362a95be WatchSource:0}: Error finding container 6bf5e94750ff6b0139f3e6e340904b34fbedc17adfa35a34ca143177362a95be: Status 404 returned error can't find the container with id 6bf5e94750ff6b0139f3e6e340904b34fbedc17adfa35a34ca143177362a95be Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.149979 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmbrm\" (UniqueName: \"kubernetes.io/projected/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-kube-api-access-bmbrm\") pod \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.150056 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-config-data\") pod \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.150228 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-combined-ca-bundle\") pod \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\" (UID: \"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd\") " Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.156437 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-kube-api-access-bmbrm" (OuterVolumeSpecName: "kube-api-access-bmbrm") pod "acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd" (UID: "acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd"). InnerVolumeSpecName "kube-api-access-bmbrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.188629 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-config-data" (OuterVolumeSpecName: "config-data") pod "acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd" (UID: "acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.191918 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd" (UID: "acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.194258 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e55a2d6b-bbfd-4e55-8d60-352b8de971ca" path="/var/lib/kubelet/pods/e55a2d6b-bbfd-4e55-8d60-352b8de971ca/volumes" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.252836 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.252867 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.252877 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmbrm\" (UniqueName: \"kubernetes.io/projected/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd-kube-api-access-bmbrm\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.915748 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd","Type":"ContainerDied","Data":"b14ecb732ffb10a5e1338aa64f993ba95d6a265261506daac4e1d98eca5453e8"} Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.916176 4757 scope.go:117] "RemoveContainer" containerID="cc57a918f77c5a7708d90b3d81c02a594ecb2bbeab6536376c5a5cbb9a9fb91a" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.915835 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.920216 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c84ddadb-263d-4a4a-bc3f-b645c449e392","Type":"ContainerStarted","Data":"19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886"} Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.920262 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c84ddadb-263d-4a4a-bc3f-b645c449e392","Type":"ContainerStarted","Data":"7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab"} Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.920277 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c84ddadb-263d-4a4a-bc3f-b645c449e392","Type":"ContainerStarted","Data":"6bf5e94750ff6b0139f3e6e340904b34fbedc17adfa35a34ca143177362a95be"} Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.951126 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.9511080299999999 podStartE2EDuration="1.95110803s" podCreationTimestamp="2025-10-06 13:59:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:59:36.949372435 +0000 UTC m=+1265.446690982" watchObservedRunningTime="2025-10-06 13:59:36.95110803 +0000 UTC m=+1265.448426577" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.970371 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.977832 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.995421 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:59:36 crc kubenswrapper[4757]: E1006 13:59:36.996964 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd" containerName="nova-scheduler-scheduler" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.996993 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd" containerName="nova-scheduler-scheduler" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.997287 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd" containerName="nova-scheduler-scheduler" Oct 06 13:59:36 crc kubenswrapper[4757]: I1006 13:59:36.998038 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.000725 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.015128 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.075804 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.075902 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzx5q\" (UniqueName: \"kubernetes.io/projected/19ac04ce-d95a-49ab-8eb2-eaf505990a53-kube-api-access-kzx5q\") pod \"nova-scheduler-0\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.076114 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-config-data\") pod \"nova-scheduler-0\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.178309 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-config-data\") pod \"nova-scheduler-0\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.178568 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.178641 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzx5q\" (UniqueName: \"kubernetes.io/projected/19ac04ce-d95a-49ab-8eb2-eaf505990a53-kube-api-access-kzx5q\") pod \"nova-scheduler-0\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.184294 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.184886 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-config-data\") pod \"nova-scheduler-0\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.204400 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzx5q\" (UniqueName: 
\"kubernetes.io/projected/19ac04ce-d95a-49ab-8eb2-eaf505990a53-kube-api-access-kzx5q\") pod \"nova-scheduler-0\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.317655 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.749557 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.821618 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 13:59:37 crc kubenswrapper[4757]: W1006 13:59:37.831457 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19ac04ce_d95a_49ab_8eb2_eaf505990a53.slice/crio-91edb9e968f70f253f6fa3bbcd656777ae3f7929bbfc0d79e3fea81b74e4c539 WatchSource:0}: Error finding container 91edb9e968f70f253f6fa3bbcd656777ae3f7929bbfc0d79e3fea81b74e4c539: Status 404 returned error can't find the container with id 91edb9e968f70f253f6fa3bbcd656777ae3f7929bbfc0d79e3fea81b74e4c539 Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.890308 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-combined-ca-bundle\") pod \"3dac5395-f6e7-4731-af9c-813f1863e380\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.890665 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-nova-metadata-tls-certs\") pod \"3dac5395-f6e7-4731-af9c-813f1863e380\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.890697 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-config-data\") pod \"3dac5395-f6e7-4731-af9c-813f1863e380\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.890737 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twbl2\" (UniqueName: \"kubernetes.io/projected/3dac5395-f6e7-4731-af9c-813f1863e380-kube-api-access-twbl2\") pod \"3dac5395-f6e7-4731-af9c-813f1863e380\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.890805 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dac5395-f6e7-4731-af9c-813f1863e380-logs\") pod \"3dac5395-f6e7-4731-af9c-813f1863e380\" (UID: \"3dac5395-f6e7-4731-af9c-813f1863e380\") " Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.891944 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dac5395-f6e7-4731-af9c-813f1863e380-logs" (OuterVolumeSpecName: "logs") pod "3dac5395-f6e7-4731-af9c-813f1863e380" (UID: "3dac5395-f6e7-4731-af9c-813f1863e380"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.896431 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dac5395-f6e7-4731-af9c-813f1863e380-kube-api-access-twbl2" (OuterVolumeSpecName: "kube-api-access-twbl2") pod "3dac5395-f6e7-4731-af9c-813f1863e380" (UID: "3dac5395-f6e7-4731-af9c-813f1863e380"). InnerVolumeSpecName "kube-api-access-twbl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.913416 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3dac5395-f6e7-4731-af9c-813f1863e380" (UID: "3dac5395-f6e7-4731-af9c-813f1863e380"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.918937 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-config-data" (OuterVolumeSpecName: "config-data") pod "3dac5395-f6e7-4731-af9c-813f1863e380" (UID: "3dac5395-f6e7-4731-af9c-813f1863e380"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.931389 4757 generic.go:334] "Generic (PLEG): container finished" podID="3dac5395-f6e7-4731-af9c-813f1863e380" containerID="801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a" exitCode=0 Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.931426 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3dac5395-f6e7-4731-af9c-813f1863e380","Type":"ContainerDied","Data":"801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a"} Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.931468 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3dac5395-f6e7-4731-af9c-813f1863e380","Type":"ContainerDied","Data":"51500f68601534894ca385810250f3e13fc5c0f39a888a88787dbd5aa8f09673"} Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.931473 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.931488 4757 scope.go:117] "RemoveContainer" containerID="801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.934766 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"19ac04ce-d95a-49ab-8eb2-eaf505990a53","Type":"ContainerStarted","Data":"91edb9e968f70f253f6fa3bbcd656777ae3f7929bbfc0d79e3fea81b74e4c539"} Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.943390 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3dac5395-f6e7-4731-af9c-813f1863e380" (UID: "3dac5395-f6e7-4731-af9c-813f1863e380"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.959311 4757 scope.go:117] "RemoveContainer" containerID="8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.983088 4757 scope.go:117] "RemoveContainer" containerID="801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a" Oct 06 13:59:37 crc kubenswrapper[4757]: E1006 13:59:37.983602 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a\": container with ID starting with 801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a not found: ID does not exist" containerID="801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.983649 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a"} err="failed to get container status \"801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a\": rpc error: code = NotFound desc = could not find container \"801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a\": container with ID starting with 801178c0cb4ca905bb061c23b2ba73d8ef853b39f1f006e92aaa5766c0055f4a not found: ID does not exist" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.983694 4757 scope.go:117] "RemoveContainer" containerID="8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27" Oct 06 13:59:37 crc kubenswrapper[4757]: E1006 13:59:37.984523 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27\": container with ID starting with 8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27 not found: ID does not exist" containerID="8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.984560 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27"} err="failed to get container status \"8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27\": rpc error: code = NotFound desc = could not find container \"8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27\": container with ID starting with 8907f890429a8a18c28d5bc247e937c79db73296823725bac7d87caa3d5feb27 not found: ID does not exist" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.992737 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dac5395-f6e7-4731-af9c-813f1863e380-logs\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.992767 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.992777 4757 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:37 crc kubenswrapper[4757]: 
I1006 13:59:37.992786 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dac5395-f6e7-4731-af9c-813f1863e380-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:37 crc kubenswrapper[4757]: I1006 13:59:37.992794 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twbl2\" (UniqueName: \"kubernetes.io/projected/3dac5395-f6e7-4731-af9c-813f1863e380-kube-api-access-twbl2\") on node \"crc\" DevicePath \"\"" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.189498 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd" path="/var/lib/kubelet/pods/acb2a4db-c5b2-4d68-a1ab-bd3fcdf578cd/volumes" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.305955 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.314940 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.332052 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:59:38 crc kubenswrapper[4757]: E1006 13:59:38.332636 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" containerName="nova-metadata-log" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.332668 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" containerName="nova-metadata-log" Oct 06 13:59:38 crc kubenswrapper[4757]: E1006 13:59:38.332694 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" containerName="nova-metadata-metadata" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.332708 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" containerName="nova-metadata-metadata" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.333133 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" containerName="nova-metadata-log" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.333191 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" containerName="nova-metadata-metadata" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.338400 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.341587 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.341780 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.346337 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.399315 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.399400 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjgg2\" (UniqueName: \"kubernetes.io/projected/562413a3-660e-4ed9-92d6-23cb7d84b936-kube-api-access-pjgg2\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.399628 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/562413a3-660e-4ed9-92d6-23cb7d84b936-logs\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.399665 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.399726 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-config-data\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.501876 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.501946 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjgg2\" (UniqueName: \"kubernetes.io/projected/562413a3-660e-4ed9-92d6-23cb7d84b936-kube-api-access-pjgg2\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.501982 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/562413a3-660e-4ed9-92d6-23cb7d84b936-logs\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " 
pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.502001 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.502020 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-config-data\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.502726 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/562413a3-660e-4ed9-92d6-23cb7d84b936-logs\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.508838 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-config-data\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.509067 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.515969 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.520213 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjgg2\" (UniqueName: \"kubernetes.io/projected/562413a3-660e-4ed9-92d6-23cb7d84b936-kube-api-access-pjgg2\") pod \"nova-metadata-0\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.655262 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.945871 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"19ac04ce-d95a-49ab-8eb2-eaf505990a53","Type":"ContainerStarted","Data":"656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36"} Oct 06 13:59:38 crc kubenswrapper[4757]: I1006 13:59:38.971919 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.97190449 podStartE2EDuration="2.97190449s" podCreationTimestamp="2025-10-06 13:59:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:59:38.970847706 +0000 UTC m=+1267.468166253" watchObservedRunningTime="2025-10-06 13:59:38.97190449 +0000 UTC m=+1267.469223027" Oct 06 13:59:39 crc kubenswrapper[4757]: I1006 13:59:39.147290 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 13:59:39 crc kubenswrapper[4757]: W1006 13:59:39.156554 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod562413a3_660e_4ed9_92d6_23cb7d84b936.slice/crio-b58dcfbdf92a639666169c93fd3e7256db5479f38d5082dfae579dd37e27cd69 WatchSource:0}: Error finding container b58dcfbdf92a639666169c93fd3e7256db5479f38d5082dfae579dd37e27cd69: Status 404 returned error can't find the container with id b58dcfbdf92a639666169c93fd3e7256db5479f38d5082dfae579dd37e27cd69 Oct 06 13:59:39 crc kubenswrapper[4757]: I1006 13:59:39.960357 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"562413a3-660e-4ed9-92d6-23cb7d84b936","Type":"ContainerStarted","Data":"a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214"} Oct 06 13:59:39 crc kubenswrapper[4757]: I1006 13:59:39.960617 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"562413a3-660e-4ed9-92d6-23cb7d84b936","Type":"ContainerStarted","Data":"f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9"} Oct 06 13:59:39 crc kubenswrapper[4757]: I1006 13:59:39.960632 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"562413a3-660e-4ed9-92d6-23cb7d84b936","Type":"ContainerStarted","Data":"b58dcfbdf92a639666169c93fd3e7256db5479f38d5082dfae579dd37e27cd69"} Oct 06 13:59:40 crc kubenswrapper[4757]: I1006 13:59:40.006109 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.006068446 podStartE2EDuration="2.006068446s" podCreationTimestamp="2025-10-06 13:59:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 13:59:40.000295451 +0000 UTC m=+1268.497614008" watchObservedRunningTime="2025-10-06 13:59:40.006068446 +0000 UTC m=+1268.503386993" Oct 06 13:59:40 crc kubenswrapper[4757]: I1006 13:59:40.194365 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dac5395-f6e7-4731-af9c-813f1863e380" path="/var/lib/kubelet/pods/3dac5395-f6e7-4731-af9c-813f1863e380/volumes" Oct 06 13:59:42 crc kubenswrapper[4757]: I1006 13:59:42.317921 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 06 13:59:43 crc kubenswrapper[4757]: I1006 13:59:43.655482 4757 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 06 13:59:43 crc kubenswrapper[4757]: I1006 13:59:43.655561 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 06 13:59:45 crc kubenswrapper[4757]: I1006 13:59:45.598298 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 06 13:59:45 crc kubenswrapper[4757]: I1006 13:59:45.598612 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 06 13:59:46 crc kubenswrapper[4757]: I1006 13:59:46.613423 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 06 13:59:46 crc kubenswrapper[4757]: I1006 13:59:46.614012 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 06 13:59:47 crc kubenswrapper[4757]: I1006 13:59:47.318257 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 06 13:59:47 crc kubenswrapper[4757]: I1006 13:59:47.356604 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 06 13:59:48 crc kubenswrapper[4757]: I1006 13:59:48.081845 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 06 13:59:48 crc kubenswrapper[4757]: I1006 13:59:48.655688 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 06 13:59:48 crc kubenswrapper[4757]: I1006 13:59:48.656130 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 06 13:59:49 crc kubenswrapper[4757]: I1006 13:59:49.670268 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 06 13:59:49 crc kubenswrapper[4757]: I1006 13:59:49.670272 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 06 13:59:54 crc kubenswrapper[4757]: I1006 13:59:54.138428 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 06 13:59:55 crc kubenswrapper[4757]: I1006 13:59:55.604430 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 06 13:59:55 crc kubenswrapper[4757]: I1006 13:59:55.604841 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 06 13:59:55 crc kubenswrapper[4757]: I1006 13:59:55.605164 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-api-0" Oct 06 13:59:55 crc kubenswrapper[4757]: I1006 13:59:55.605536 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 06 13:59:55 crc kubenswrapper[4757]: I1006 13:59:55.611550 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 06 13:59:55 crc kubenswrapper[4757]: I1006 13:59:55.612868 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 06 13:59:58 crc kubenswrapper[4757]: I1006 13:59:58.660298 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 06 13:59:58 crc kubenswrapper[4757]: I1006 13:59:58.666792 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 06 13:59:58 crc kubenswrapper[4757]: I1006 13:59:58.674207 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 06 13:59:59 crc kubenswrapper[4757]: I1006 13:59:59.181428 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.149541 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b"] Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.151473 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.154502 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.158057 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.219783 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b"] Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.240906 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aff831bf-b060-42c3-a4af-013f232517fc-secret-volume\") pod \"collect-profiles-29329320-7c82b\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.241308 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv7x5\" (UniqueName: \"kubernetes.io/projected/aff831bf-b060-42c3-a4af-013f232517fc-kube-api-access-nv7x5\") pod \"collect-profiles-29329320-7c82b\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.241507 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aff831bf-b060-42c3-a4af-013f232517fc-config-volume\") pod \"collect-profiles-29329320-7c82b\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc 
kubenswrapper[4757]: I1006 14:00:00.343760 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aff831bf-b060-42c3-a4af-013f232517fc-config-volume\") pod \"collect-profiles-29329320-7c82b\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.344209 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aff831bf-b060-42c3-a4af-013f232517fc-secret-volume\") pod \"collect-profiles-29329320-7c82b\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.344362 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv7x5\" (UniqueName: \"kubernetes.io/projected/aff831bf-b060-42c3-a4af-013f232517fc-kube-api-access-nv7x5\") pod \"collect-profiles-29329320-7c82b\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.345299 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aff831bf-b060-42c3-a4af-013f232517fc-config-volume\") pod \"collect-profiles-29329320-7c82b\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.349718 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aff831bf-b060-42c3-a4af-013f232517fc-secret-volume\") pod \"collect-profiles-29329320-7c82b\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.364927 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv7x5\" (UniqueName: \"kubernetes.io/projected/aff831bf-b060-42c3-a4af-013f232517fc-kube-api-access-nv7x5\") pod \"collect-profiles-29329320-7c82b\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.500503 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:00 crc kubenswrapper[4757]: I1006 14:00:00.944687 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b"] Oct 06 14:00:00 crc kubenswrapper[4757]: W1006 14:00:00.950021 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaff831bf_b060_42c3_a4af_013f232517fc.slice/crio-ba488e453b0e1b388e084d7ee65ec71c7083d12a9ee4af737ecd0b2d138d23ca WatchSource:0}: Error finding container ba488e453b0e1b388e084d7ee65ec71c7083d12a9ee4af737ecd0b2d138d23ca: Status 404 returned error can't find the container with id ba488e453b0e1b388e084d7ee65ec71c7083d12a9ee4af737ecd0b2d138d23ca Oct 06 14:00:01 crc kubenswrapper[4757]: I1006 14:00:01.210809 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" event={"ID":"aff831bf-b060-42c3-a4af-013f232517fc","Type":"ContainerStarted","Data":"5ce20a78c90863c4b79ad3c82e2e84e797731dd140f3ca3a25cb4e714ff4b4e1"} Oct 06 14:00:01 crc kubenswrapper[4757]: I1006 14:00:01.211198 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" event={"ID":"aff831bf-b060-42c3-a4af-013f232517fc","Type":"ContainerStarted","Data":"ba488e453b0e1b388e084d7ee65ec71c7083d12a9ee4af737ecd0b2d138d23ca"} Oct 06 14:00:01 crc kubenswrapper[4757]: I1006 14:00:01.236438 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" podStartSLOduration=1.236413282 podStartE2EDuration="1.236413282s" podCreationTimestamp="2025-10-06 14:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 14:00:01.234705887 +0000 UTC m=+1289.732024434" watchObservedRunningTime="2025-10-06 14:00:01.236413282 +0000 UTC m=+1289.733731819" Oct 06 14:00:02 crc kubenswrapper[4757]: I1006 14:00:02.222694 4757 generic.go:334] "Generic (PLEG): container finished" podID="aff831bf-b060-42c3-a4af-013f232517fc" containerID="5ce20a78c90863c4b79ad3c82e2e84e797731dd140f3ca3a25cb4e714ff4b4e1" exitCode=0 Oct 06 14:00:02 crc kubenswrapper[4757]: I1006 14:00:02.222771 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" event={"ID":"aff831bf-b060-42c3-a4af-013f232517fc","Type":"ContainerDied","Data":"5ce20a78c90863c4b79ad3c82e2e84e797731dd140f3ca3a25cb4e714ff4b4e1"} Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.581659 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.609271 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aff831bf-b060-42c3-a4af-013f232517fc-secret-volume\") pod \"aff831bf-b060-42c3-a4af-013f232517fc\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.609353 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nv7x5\" (UniqueName: \"kubernetes.io/projected/aff831bf-b060-42c3-a4af-013f232517fc-kube-api-access-nv7x5\") pod \"aff831bf-b060-42c3-a4af-013f232517fc\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.609396 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aff831bf-b060-42c3-a4af-013f232517fc-config-volume\") pod \"aff831bf-b060-42c3-a4af-013f232517fc\" (UID: \"aff831bf-b060-42c3-a4af-013f232517fc\") " Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.610793 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aff831bf-b060-42c3-a4af-013f232517fc-config-volume" (OuterVolumeSpecName: "config-volume") pod "aff831bf-b060-42c3-a4af-013f232517fc" (UID: "aff831bf-b060-42c3-a4af-013f232517fc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.616757 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aff831bf-b060-42c3-a4af-013f232517fc-kube-api-access-nv7x5" (OuterVolumeSpecName: "kube-api-access-nv7x5") pod "aff831bf-b060-42c3-a4af-013f232517fc" (UID: "aff831bf-b060-42c3-a4af-013f232517fc"). InnerVolumeSpecName "kube-api-access-nv7x5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.617789 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aff831bf-b060-42c3-a4af-013f232517fc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "aff831bf-b060-42c3-a4af-013f232517fc" (UID: "aff831bf-b060-42c3-a4af-013f232517fc"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.712319 4757 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aff831bf-b060-42c3-a4af-013f232517fc-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.712632 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nv7x5\" (UniqueName: \"kubernetes.io/projected/aff831bf-b060-42c3-a4af-013f232517fc-kube-api-access-nv7x5\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:03 crc kubenswrapper[4757]: I1006 14:00:03.712712 4757 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aff831bf-b060-42c3-a4af-013f232517fc-config-volume\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:04 crc kubenswrapper[4757]: I1006 14:00:04.246807 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" event={"ID":"aff831bf-b060-42c3-a4af-013f232517fc","Type":"ContainerDied","Data":"ba488e453b0e1b388e084d7ee65ec71c7083d12a9ee4af737ecd0b2d138d23ca"} Oct 06 14:00:04 crc kubenswrapper[4757]: I1006 14:00:04.246864 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba488e453b0e1b388e084d7ee65ec71c7083d12a9ee4af737ecd0b2d138d23ca" Oct 06 14:00:04 crc kubenswrapper[4757]: I1006 14:00:04.247418 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b" Oct 06 14:00:04 crc kubenswrapper[4757]: I1006 14:00:04.361584 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:00:04 crc kubenswrapper[4757]: I1006 14:00:04.361961 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:00:18 crc kubenswrapper[4757]: I1006 14:00:18.820889 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 06 14:00:18 crc kubenswrapper[4757]: I1006 14:00:18.821562 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" containerName="openstackclient" containerID="cri-o://60487956b45ab32166592c2b7eceedfd15fb90339151dc9e4ceaf70a848d1bb5" gracePeriod=2 Oct 06 14:00:18 crc kubenswrapper[4757]: I1006 14:00:18.834676 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 06 14:00:18 crc kubenswrapper[4757]: I1006 14:00:18.989181 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 14:00:18 crc kubenswrapper[4757]: I1006 14:00:18.989417 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerName="cinder-scheduler" containerID="cri-o://28e913a804b505e4f014857f7b6a17fb5e76553cd58c85ad5dd061ab65fa664b" gracePeriod=30 Oct 
Oct 06 14:00:18 crc kubenswrapper[4757]: I1006 14:00:18.989569 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerName="probe" containerID="cri-o://0324863c9c638456508e30d35fe0f9846118f58caedb60fbed5e6e14f91c6b11" gracePeriod=30
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.122004 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.153982 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron2bb5-account-delete-r8kww"]
Oct 06 14:00:19 crc kubenswrapper[4757]: E1006 14:00:19.165750 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aff831bf-b060-42c3-a4af-013f232517fc" containerName="collect-profiles"
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.165900 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="aff831bf-b060-42c3-a4af-013f232517fc" containerName="collect-profiles"
Oct 06 14:00:19 crc kubenswrapper[4757]: E1006 14:00:19.166002 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" containerName="openstackclient"
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.166077 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" containerName="openstackclient"
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.166438 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="aff831bf-b060-42c3-a4af-013f232517fc" containerName="collect-profiles"
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.166530 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" containerName="openstackclient"
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.167319 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.167593 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a274a347-259a-4919-8326-8047df9b0de8" containerName="cinder-api-log" containerID="cri-o://56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53" gracePeriod=30
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.167852 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron2bb5-account-delete-r8kww"
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.168729 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="a274a347-259a-4919-8326-8047df9b0de8" containerName="cinder-api" containerID="cri-o://66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be" gracePeriod=30
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.192432 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron2bb5-account-delete-r8kww"]
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.272809 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapid1d1-account-delete-j7wzz"]
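[editor's note] Every "Killing container with a grace period" line is the kubelet asking the runtime to deliver SIGTERM and then waiting up to gracePeriod seconds (2 for openstackclient above, 30 for most of these services, 300 for the OVN database servers) before escalating to SIGKILL. A process that wants to use that window traps SIGTERM, roughly like this (a sketch; the 25s drain budget is an assumption chosen to fit inside a 30s grace period):

```go
package main

import (
	"context"
	"log"
	"os/signal"
	"syscall"
	"time"
)

// Sketch of a process cooperating with the kubelet's graceful kill: SIGTERM
// arrives first, and SIGKILL follows only after the pod's grace period, so
// cleanup must be bounded to finish inside that window.
func main() {
	ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM)
	defer stop()

	<-ctx.Done() // SIGTERM from the runtime
	drainCtx, cancel := context.WithTimeout(context.Background(), 25*time.Second)
	defer cancel()
	_ = drainCtx // e.g. srv.Shutdown(drainCtx) for an HTTP server
	log.Println("drained before the SIGKILL deadline")
}
```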
Need to start a new one" pod="openstack/novaapid1d1-account-delete-j7wzz" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.368282 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapid1d1-account-delete-j7wzz"] Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.411157 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell00cee-account-delete-2rcjp"] Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.412468 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell00cee-account-delete-2rcjp" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.435525 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9xkr\" (UniqueName: \"kubernetes.io/projected/dffbfc0a-c4ec-41cc-873b-552bc6b7fa69-kube-api-access-q9xkr\") pod \"neutron2bb5-account-delete-r8kww\" (UID: \"dffbfc0a-c4ec-41cc-873b-552bc6b7fa69\") " pod="openstack/neutron2bb5-account-delete-r8kww" Oct 06 14:00:19 crc kubenswrapper[4757]: E1006 14:00:19.436714 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 06 14:00:19 crc kubenswrapper[4757]: E1006 14:00:19.436764 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data podName:0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:19.936748443 +0000 UTC m=+1308.434066980 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data") pod "rabbitmq-server-0" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7") : configmap "rabbitmq-config-data" not found Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.437305 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell00cee-account-delete-2rcjp"] Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.460149 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.460761 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" containerName="openstack-network-exporter" containerID="cri-o://0a01bd55e84626495bc5fb0d7d2194d09355579e800eb0a54e05bb99ed8a7ca6" gracePeriod=300 Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.463880 4757 generic.go:334] "Generic (PLEG): container finished" podID="a274a347-259a-4919-8326-8047df9b0de8" containerID="56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53" exitCode=143 Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.463918 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a274a347-259a-4919-8326-8047df9b0de8","Type":"ContainerDied","Data":"56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53"} Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.471878 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican39b0-account-delete-gptgt"] Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.474332 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican39b0-account-delete-gptgt" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.482582 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican39b0-account-delete-gptgt"] Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.537159 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9xkr\" (UniqueName: \"kubernetes.io/projected/dffbfc0a-c4ec-41cc-873b-552bc6b7fa69-kube-api-access-q9xkr\") pod \"neutron2bb5-account-delete-r8kww\" (UID: \"dffbfc0a-c4ec-41cc-873b-552bc6b7fa69\") " pod="openstack/neutron2bb5-account-delete-r8kww" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.537256 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdhxl\" (UniqueName: \"kubernetes.io/projected/24ed1753-25de-4bde-8158-52cb7dd6a2f1-kube-api-access-gdhxl\") pod \"novaapid1d1-account-delete-j7wzz\" (UID: \"24ed1753-25de-4bde-8158-52cb7dd6a2f1\") " pod="openstack/novaapid1d1-account-delete-j7wzz" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.537343 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djhht\" (UniqueName: \"kubernetes.io/projected/7904f7d1-2332-4402-bd0b-4a40f5be43f9-kube-api-access-djhht\") pod \"novacell00cee-account-delete-2rcjp\" (UID: \"7904f7d1-2332-4402-bd0b-4a40f5be43f9\") " pod="openstack/novacell00cee-account-delete-2rcjp" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.539557 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-kjpw4"] Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.550953 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-kjpw4"] Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.574355 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9xkr\" (UniqueName: \"kubernetes.io/projected/dffbfc0a-c4ec-41cc-873b-552bc6b7fa69-kube-api-access-q9xkr\") pod \"neutron2bb5-account-delete-r8kww\" (UID: \"dffbfc0a-c4ec-41cc-873b-552bc6b7fa69\") " pod="openstack/neutron2bb5-account-delete-r8kww" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.577277 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" containerName="ovsdbserver-nb" containerID="cri-o://199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc" gracePeriod=300 Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.638657 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdhxl\" (UniqueName: \"kubernetes.io/projected/24ed1753-25de-4bde-8158-52cb7dd6a2f1-kube-api-access-gdhxl\") pod \"novaapid1d1-account-delete-j7wzz\" (UID: \"24ed1753-25de-4bde-8158-52cb7dd6a2f1\") " pod="openstack/novaapid1d1-account-delete-j7wzz" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.638746 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g7z5\" (UniqueName: \"kubernetes.io/projected/76f4e4cc-eccd-4c44-a39a-a75c06383d92-kube-api-access-4g7z5\") pod \"barbican39b0-account-delete-gptgt\" (UID: \"76f4e4cc-eccd-4c44-a39a-a75c06383d92\") " pod="openstack/barbican39b0-account-delete-gptgt" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.638791 4757 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-djhht\" (UniqueName: \"kubernetes.io/projected/7904f7d1-2332-4402-bd0b-4a40f5be43f9-kube-api-access-djhht\") pod \"novacell00cee-account-delete-2rcjp\" (UID: \"7904f7d1-2332-4402-bd0b-4a40f5be43f9\") " pod="openstack/novacell00cee-account-delete-2rcjp" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.680072 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djhht\" (UniqueName: \"kubernetes.io/projected/7904f7d1-2332-4402-bd0b-4a40f5be43f9-kube-api-access-djhht\") pod \"novacell00cee-account-delete-2rcjp\" (UID: \"7904f7d1-2332-4402-bd0b-4a40f5be43f9\") " pod="openstack/novacell00cee-account-delete-2rcjp" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.681736 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdhxl\" (UniqueName: \"kubernetes.io/projected/24ed1753-25de-4bde-8158-52cb7dd6a2f1-kube-api-access-gdhxl\") pod \"novaapid1d1-account-delete-j7wzz\" (UID: \"24ed1753-25de-4bde-8158-52cb7dd6a2f1\") " pod="openstack/novaapid1d1-account-delete-j7wzz" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.695572 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapid1d1-account-delete-j7wzz" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.741690 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g7z5\" (UniqueName: \"kubernetes.io/projected/76f4e4cc-eccd-4c44-a39a-a75c06383d92-kube-api-access-4g7z5\") pod \"barbican39b0-account-delete-gptgt\" (UID: \"76f4e4cc-eccd-4c44-a39a-a75c06383d92\") " pod="openstack/barbican39b0-account-delete-gptgt" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.756952 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell00cee-account-delete-2rcjp" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.788725 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g7z5\" (UniqueName: \"kubernetes.io/projected/76f4e4cc-eccd-4c44-a39a-a75c06383d92-kube-api-access-4g7z5\") pod \"barbican39b0-account-delete-gptgt\" (UID: \"76f4e4cc-eccd-4c44-a39a-a75c06383d92\") " pod="openstack/barbican39b0-account-delete-gptgt" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.803501 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron2bb5-account-delete-r8kww" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.820831 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican39b0-account-delete-gptgt" Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.833026 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.833301 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="ovn-northd" containerID="cri-o://1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b" gracePeriod=30 Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.833699 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="openstack-network-exporter" containerID="cri-o://b1eadb61598f06991c54612b8621cac74bf4c117422efafbae7b8a42d1721473" gracePeriod=30 Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.855739 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 14:00:19 crc kubenswrapper[4757]: E1006 14:00:19.948667 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 06 14:00:19 crc kubenswrapper[4757]: E1006 14:00:19.948721 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data podName:0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:20.948705295 +0000 UTC m=+1309.446023832 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data") pod "rabbitmq-server-0" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7") : configmap "rabbitmq-config-data" not found Oct 06 14:00:19 crc kubenswrapper[4757]: E1006 14:00:19.948905 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 06 14:00:19 crc kubenswrapper[4757]: E1006 14:00:19.948957 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data podName:cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:20.448940312 +0000 UTC m=+1308.946258849 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data") pod "rabbitmq-cell1-server-0" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61") : configmap "rabbitmq-cell1-config-data" not found
Oct 06 14:00:19 crc kubenswrapper[4757]: I1006 14:00:19.992528 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-95qtm"]
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.012157 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-95qtm"]
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.042844 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-vsmtw"]
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.085827 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-vsmtw"]
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.108272 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-6znnt"]
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.144806 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-6znnt"]
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.154436 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-l99t6"]
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.156371 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-l99t6" podUID="f38ce569-0a5c-408d-9d44-bb953d38e24e" containerName="openstack-network-exporter" containerID="cri-o://34a280e57324a0c49141921ad2801af530e40f7172ee7808ae2fc674330a1bf6" gracePeriod=30
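[editor's note] The "Unable to retrieve pull secret" warning immediately below means nova-cell1-conductor-0 references an imagePullSecret whose Secret object has already been deleted; the kubelet proceeds, and the pull only fails if the registry actually requires those credentials. In pod-spec terms the reference looks like this (a sketch; only the secret name comes from the log):

```go
package sketch

import corev1 "k8s.io/api/core/v1"

// withPullSecret shows the kind of imagePullSecrets reference behind the
// warning below: the pod names a dockercfg secret by name, and if that
// secret is missing the kubelet logs the warning and pulls anyway.
func withPullSecret(spec corev1.PodSpec) corev1.PodSpec {
	spec.ImagePullSecrets = append(spec.ImagePullSecrets,
		corev1.LocalObjectReference{Name: "nova-nova-dockercfg-hwxdw"})
	return spec
}
```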
pod="openstack/nova-cell1-conductor-0" secret="" err="secret \"nova-nova-dockercfg-hwxdw\" not found" Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.237277 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bf60902-663f-4bbc-8415-691e6519a557" path="/var/lib/kubelet/pods/3bf60902-663f-4bbc-8415-691e6519a557/volumes" Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.237810 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="434206fd-9f64-4c9d-a528-55d9361dad92" path="/var/lib/kubelet/pods/434206fd-9f64-4c9d-a528-55d9361dad92/volumes" Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.238415 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccc640a0-578c-4bdd-9c71-789d3c821099" path="/var/lib/kubelet/pods/ccc640a0-578c-4bdd-9c71-789d3c821099/volumes" Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.238900 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9c83c06-5d47-4561-aa37-2376cc54401d" path="/var/lib/kubelet/pods/f9c83c06-5d47-4561-aa37-2376cc54401d/volumes" Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.270483 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kn7bx"] Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.270519 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-fqwwx"] Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.276235 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-kssfb"] Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.303256 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-kssfb"] Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.376184 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-587f5f887c-jf8v5"] Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.376622 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-587f5f887c-jf8v5" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerName="neutron-api" containerID="cri-o://a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.379645 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-587f5f887c-jf8v5" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerName="neutron-httpd" containerID="cri-o://bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.392655 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-4nqxd"] Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.399182 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-4nqxd"] Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.406113 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-bswtq"] Oct 06 14:00:20 crc kubenswrapper[4757]: E1006 14:00:20.407111 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.417024 4757 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-bswtq"] Oct 06 14:00:20 crc kubenswrapper[4757]: E1006 14:00:20.418231 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 06 14:00:20 crc kubenswrapper[4757]: E1006 14:00:20.439111 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 06 14:00:20 crc kubenswrapper[4757]: E1006 14:00:20.439166 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="ovn-northd" Oct 06 14:00:20 crc kubenswrapper[4757]: E1006 14:00:20.471196 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 06 14:00:20 crc kubenswrapper[4757]: E1006 14:00:20.471247 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data podName:cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:21.471233395 +0000 UTC m=+1309.968551932 (durationBeforeRetry 1s). 
Oct 06 14:00:20 crc kubenswrapper[4757]: E1006 14:00:20.471247 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data podName:cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:21.471233395 +0000 UTC m=+1309.968551932 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data") pod "rabbitmq-cell1-server-0" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61") : configmap "rabbitmq-cell1-config-data" not found
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.483359 4757 generic.go:334] "Generic (PLEG): container finished" podID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerID="b1eadb61598f06991c54612b8621cac74bf4c117422efafbae7b8a42d1721473" exitCode=2
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.483435 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"cc01b313-87cb-44f6-9c85-84ae4931e1f6","Type":"ContainerDied","Data":"b1eadb61598f06991c54612b8621cac74bf4c117422efafbae7b8a42d1721473"}
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.516046 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_47e3132d-b23b-47f2-b26e-5511df70deec/ovsdbserver-nb/0.log"
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.516112 4757 generic.go:334] "Generic (PLEG): container finished" podID="47e3132d-b23b-47f2-b26e-5511df70deec" containerID="0a01bd55e84626495bc5fb0d7d2194d09355579e800eb0a54e05bb99ed8a7ca6" exitCode=2
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.516129 4757 generic.go:334] "Generic (PLEG): container finished" podID="47e3132d-b23b-47f2-b26e-5511df70deec" containerID="199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc" exitCode=143
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.516211 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"47e3132d-b23b-47f2-b26e-5511df70deec","Type":"ContainerDied","Data":"0a01bd55e84626495bc5fb0d7d2194d09355579e800eb0a54e05bb99ed8a7ca6"}
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.516237 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"47e3132d-b23b-47f2-b26e-5511df70deec","Type":"ContainerDied","Data":"199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc"}
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.529624 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-l99t6_f38ce569-0a5c-408d-9d44-bb953d38e24e/openstack-network-exporter/0.log"
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.529664 4757 generic.go:334] "Generic (PLEG): container finished" podID="f38ce569-0a5c-408d-9d44-bb953d38e24e" containerID="34a280e57324a0c49141921ad2801af530e40f7172ee7808ae2fc674330a1bf6" exitCode=2
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.529692 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-l99t6" event={"ID":"f38ce569-0a5c-408d-9d44-bb953d38e24e","Type":"ContainerDied","Data":"34a280e57324a0c49141921ad2801af530e40f7172ee7808ae2fc674330a1bf6"}
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.555639 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"]
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556128 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-server" containerID="cri-o://4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a" gracePeriod=30
Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556485 4757 kuberuntime_container.go:808]
"Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="swift-recon-cron" containerID="cri-o://a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556527 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="rsync" containerID="cri-o://fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556559 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-expirer" containerID="cri-o://b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556593 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-updater" containerID="cri-o://c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556623 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-auditor" containerID="cri-o://425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556666 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-replicator" containerID="cri-o://47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556695 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-server" containerID="cri-o://128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556724 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-updater" containerID="cri-o://2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556751 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-auditor" containerID="cri-o://aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556778 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-replicator" containerID="cri-o://b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556805 4757 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-server" containerID="cri-o://4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556849 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-reaper" containerID="cri-o://310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556877 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-auditor" containerID="cri-o://9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.556902 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-replicator" containerID="cri-o://586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.638794 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-vpghx"] Oct 06 14:00:20 crc kubenswrapper[4757]: E1006 14:00:20.654176 4757 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-kn7bx" message="Exiting ovn-controller (1) " Oct 06 14:00:20 crc kubenswrapper[4757]: E1006 14:00:20.654236 4757 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-kn7bx" podUID="0efda247-fa18-49db-a37d-1dd28d999ed7" containerName="ovn-controller" containerID="cri-o://861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3" Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.654275 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-kn7bx" podUID="0efda247-fa18-49db-a37d-1dd28d999ed7" containerName="ovn-controller" containerID="cri-o://861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3" gracePeriod=30 Oct 06 14:00:20 crc kubenswrapper[4757]: I1006 14:00:20.679274 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-vpghx"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.717470 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5785d8b947-nsbsm"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.717945 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" podUID="f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" containerName="dnsmasq-dns" containerID="cri-o://96c3bb713c662f2ed04a7bb1393bbac6037596a56af39675d70856aba417eb71" gracePeriod=10 Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.797600 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.798258 4757 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ovsdbserver-sb-0" podUID="780fa947-0a68-4231-b5b3-e0cad80204d2" containerName="openstack-network-exporter" containerID="cri-o://a12b1909659d9dffd84421e8aa816dc9c5582d96e48bf6ee24c272907e439897" gracePeriod=300 Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.830326 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.876801 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.886024 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-log" containerID="cri-o://f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9" gracePeriod=30 Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.887272 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-metadata" containerID="cri-o://a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214" gracePeriod=30 Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.963683 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.963950 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-log" containerID="cri-o://7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab" gracePeriod=30 Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.964383 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-api" containerID="cri-o://19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886" gracePeriod=30 Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.991609 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-ff5468974-c5722"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.991888 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-ff5468974-c5722" podUID="afc51d15-69dd-4900-886c-29a4f372df24" containerName="placement-log" containerID="cri-o://832893f98af9abff959fc064ee7dc85ca2245d5e8f5d1e911ffd6cf81dcf776b" gracePeriod=30 Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:20.992341 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-ff5468974-c5722" podUID="afc51d15-69dd-4900-886c-29a4f372df24" containerName="placement-api" containerID="cri-o://501ec0020527b58bea34dc6aa1f5770171c8c96af98e8cc181a6f0a30690207d" gracePeriod=30 Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:20.996883 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:20.997014 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data podName:0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:22.996989798 +0000 UTC m=+1311.494308345 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data") pod "rabbitmq-server-0" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7") : configmap "rabbitmq-config-data" not found Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.074664 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-7ql5g"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.120077 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-7ql5g"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.120608 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="780fa947-0a68-4231-b5b3-e0cad80204d2" containerName="ovsdbserver-sb" containerID="cri-o://7f6ef039c9a56166775bfea66933d67e543e08d8ece459c480259808a1dad4e8" gracePeriod=300 Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.129679 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-3d6a-account-create-ktj8k"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.149839 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-3d6a-account-create-ktj8k"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.154810 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.155141 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerName="glance-log" containerID="cri-o://0cb8bd1a995fc5d7d87c203ebb692a3ce29bbd8e20024032b2050c0645baebc6" gracePeriod=30 Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.155642 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerName="glance-httpd" containerID="cri-o://fe4d0be031635e1711fdd14e7751dbf7c8f1ff14ed259fe9f2d2457646f22b8f" gracePeriod=30 Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.159318 4757 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Oct 06 14:00:21 crc kubenswrapper[4757]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 06 14:00:21 crc kubenswrapper[4757]: + source /usr/local/bin/container-scripts/functions Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNBridge=br-int Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNRemote=tcp:localhost:6642 Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNEncapType=geneve Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNAvailabilityZones= Oct 06 14:00:21 crc kubenswrapper[4757]: ++ EnableChassisAsGateway=true Oct 06 14:00:21 crc kubenswrapper[4757]: ++ PhysicalNetworks= Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNHostName= Oct 06 14:00:21 crc kubenswrapper[4757]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 06 14:00:21 crc kubenswrapper[4757]: ++ ovs_dir=/var/lib/openvswitch Oct 06 14:00:21 crc kubenswrapper[4757]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 06 14:00:21 crc kubenswrapper[4757]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 06 14:00:21 crc kubenswrapper[4757]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 06 14:00:21 crc kubenswrapper[4757]: 
+ '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 06 14:00:21 crc kubenswrapper[4757]: + sleep 0.5 Oct 06 14:00:21 crc kubenswrapper[4757]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 06 14:00:21 crc kubenswrapper[4757]: + cleanup_ovsdb_server_semaphore Oct 06 14:00:21 crc kubenswrapper[4757]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 06 14:00:21 crc kubenswrapper[4757]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Oct 06 14:00:21 crc kubenswrapper[4757]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-fqwwx" message=< Oct 06 14:00:21 crc kubenswrapper[4757]: Exiting ovsdb-server (5) [ OK ] Oct 06 14:00:21 crc kubenswrapper[4757]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 06 14:00:21 crc kubenswrapper[4757]: + source /usr/local/bin/container-scripts/functions Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNBridge=br-int Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNRemote=tcp:localhost:6642 Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNEncapType=geneve Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNAvailabilityZones= Oct 06 14:00:21 crc kubenswrapper[4757]: ++ EnableChassisAsGateway=true Oct 06 14:00:21 crc kubenswrapper[4757]: ++ PhysicalNetworks= Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNHostName= Oct 06 14:00:21 crc kubenswrapper[4757]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 06 14:00:21 crc kubenswrapper[4757]: ++ ovs_dir=/var/lib/openvswitch Oct 06 14:00:21 crc kubenswrapper[4757]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 06 14:00:21 crc kubenswrapper[4757]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 06 14:00:21 crc kubenswrapper[4757]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 06 14:00:21 crc kubenswrapper[4757]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 06 14:00:21 crc kubenswrapper[4757]: + sleep 0.5 Oct 06 14:00:21 crc kubenswrapper[4757]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 06 14:00:21 crc kubenswrapper[4757]: + cleanup_ovsdb_server_semaphore Oct 06 14:00:21 crc kubenswrapper[4757]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 06 14:00:21 crc kubenswrapper[4757]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Oct 06 14:00:21 crc kubenswrapper[4757]: > Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.159366 4757 kuberuntime_container.go:691] "PreStop hook failed" err=< Oct 06 14:00:21 crc kubenswrapper[4757]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Oct 06 14:00:21 crc kubenswrapper[4757]: + source /usr/local/bin/container-scripts/functions Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNBridge=br-int Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNRemote=tcp:localhost:6642 Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNEncapType=geneve Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNAvailabilityZones= Oct 06 14:00:21 crc kubenswrapper[4757]: ++ EnableChassisAsGateway=true Oct 06 14:00:21 crc kubenswrapper[4757]: ++ PhysicalNetworks= Oct 06 14:00:21 crc kubenswrapper[4757]: ++ OVNHostName= Oct 06 14:00:21 crc kubenswrapper[4757]: ++ DB_FILE=/etc/openvswitch/conf.db Oct 06 14:00:21 crc kubenswrapper[4757]: ++ ovs_dir=/var/lib/openvswitch Oct 06 14:00:21 crc kubenswrapper[4757]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Oct 06 14:00:21 crc kubenswrapper[4757]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Oct 06 14:00:21 crc kubenswrapper[4757]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Oct 06 14:00:21 crc kubenswrapper[4757]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Oct 06 14:00:21 crc kubenswrapper[4757]: + sleep 0.5 Oct 06 14:00:21 crc kubenswrapper[4757]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']'
Oct 06 14:00:21 crc kubenswrapper[4757]: + cleanup_ovsdb_server_semaphore
Oct 06 14:00:21 crc kubenswrapper[4757]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server
Oct 06 14:00:21 crc kubenswrapper[4757]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd
Oct 06 14:00:21 crc kubenswrapper[4757]: > pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server" containerID="cri-o://21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.159411 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server" containerID="cri-o://21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.187300 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-qwgbm"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.210990 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-qwgbm"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.225860 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5c8d-account-create-q48sg"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.230509 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd" containerID="cri-o://ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.236895 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-5c8d-account-create-q48sg"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.252338 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.252590 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerName="glance-log" containerID="cri-o://17ce35871062aaa7a1aa4139c7f03addfd9e5a2514de88d4e098daa173fc41a8" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.252966 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerName="glance-httpd" containerID="cri-o://627201afc996e1b5ebc9ec2b49081817f4957580a028ef0ede8ac13918294d2e" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.260701 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.271269 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-7c7848899c-8bm7g"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.271491 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-7c7848899c-8bm7g" podUID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerName="proxy-httpd" containerID="cri-o://ba1f45ef3932bf918c1b59fa1ed6629efa95b65ac36b643ce4bf6f517905a218" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.271594 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-7c7848899c-8bm7g" podUID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerName="proxy-server" containerID="cri-o://14022a75e2f882a054a2a8082d11133582e6585d36ef89867d94fb837bf54886" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.290002 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-6b2cq"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.297206 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="39ef4372-3b20-44b5-b441-85f963e6a25a" containerName="galera" containerID="cri-o://c6ba27d47c99f13d015277533c3fba4500d832d05fa4632f9ba24c71c9d9ec8e" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.304565 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc is running failed: container process not found" containerID="199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc" cmd=["/usr/bin/pidof","ovsdb-server"]
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.305031 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc is running failed: container process not found" containerID="199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc" cmd=["/usr/bin/pidof","ovsdb-server"]
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.308377 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc is running failed: container process not found" containerID="199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc" cmd=["/usr/bin/pidof","ovsdb-server"]
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.308447 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" containerName="ovsdbserver-nb"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.312365 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-6b2cq"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.356317 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-39b0-account-create-slfc6"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.366160 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-39b0-account-create-slfc6"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.381743 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican39b0-account-delete-gptgt"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.404458 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-9f43-account-create-8588z"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.419240 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-bptsp"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.442211 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-9f43-account-create-8588z"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.448250 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-bptsp"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.449382 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" containerName="rabbitmq" containerID="cri-o://1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7" gracePeriod=604800
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.460395 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-k5f5t"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.461678 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-a940-account-create-wq96v"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.473119 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-a940-account-create-wq96v"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.473249 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-k5f5t"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.487014 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.488387 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="61d68e86-89f3-4dc6-bb42-7286c789fbba" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1e20cdc4c0ab7fbe34675da2bd3b70c163eadea2e1127276d5eac3f283996f8c" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.497180 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.505071 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c78d5595b-89h8z"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.506235 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c78d5595b-89h8z" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api-log" containerID="cri-o://92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.506679 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c78d5595b-89h8z" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api" containerID="cri-o://9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.508888 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.509200 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data podName:cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:23.509178787 +0000 UTC m=+1312.006497324 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data") pod "rabbitmq-cell1-server-0" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61") : configmap "rabbitmq-cell1-config-data" not found
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.512214 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-996ccd7c9-wv7n4"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.512397 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-996ccd7c9-wv7n4" podUID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerName="barbican-worker-log" containerID="cri-o://b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.512675 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-996ccd7c9-wv7n4" podUID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerName="barbican-worker" containerID="cri-o://7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.520520 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-6f4c564f58-t9kxj"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.520801 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" podUID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerName="barbican-keystone-listener-log" containerID="cri-o://8657327aee1067ffca05e4e9c577e2dddc6b63823ba1e0a17d6ec6a140fc3cc0" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.520985 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" podUID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerName="barbican-keystone-listener" containerID="cri-o://103f1cb6b966566428982058c2497d82d3412e2a56a570fe97a8556bbaf90b4e" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.532466 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ststm"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.543490 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.543961 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="19ac04ce-d95a-49ab-8eb2-eaf505990a53" containerName="nova-scheduler-scheduler" containerID="cri-o://656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.550382 4757 generic.go:334] "Generic (PLEG): container finished" podID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerID="f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9" exitCode=143
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.550447 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"562413a3-660e-4ed9-92d6-23cb7d84b936","Type":"ContainerDied","Data":"f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.551068 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-ststm"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.568922 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.568977 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.568988 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nz4nw"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.569478 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="71099fbe-349d-4a04-857c-41f270ec89af" containerName="nova-cell1-conductor-conductor" containerID="cri-o://ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.572906 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="924963ee-1194-4d98-84d7-9bb3e426f7bc" containerName="nova-cell0-conductor-conductor" containerID="cri-o://9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b" gracePeriod=30
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.574752 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nz4nw"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.575677 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_780fa947-0a68-4231-b5b3-e0cad80204d2/ovsdbserver-sb/0.log"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.575712 4757 generic.go:334] "Generic (PLEG): container finished" podID="780fa947-0a68-4231-b5b3-e0cad80204d2" containerID="a12b1909659d9dffd84421e8aa816dc9c5582d96e48bf6ee24c272907e439897" exitCode=2
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.575726 4757 generic.go:334] "Generic (PLEG): container finished" podID="780fa947-0a68-4231-b5b3-e0cad80204d2" containerID="7f6ef039c9a56166775bfea66933d67e543e08d8ece459c480259808a1dad4e8" exitCode=143
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.575765 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"780fa947-0a68-4231-b5b3-e0cad80204d2","Type":"ContainerDied","Data":"a12b1909659d9dffd84421e8aa816dc9c5582d96e48bf6ee24c272907e439897"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.575838 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"780fa947-0a68-4231-b5b3-e0cad80204d2","Type":"ContainerDied","Data":"7f6ef039c9a56166775bfea66933d67e543e08d8ece459c480259808a1dad4e8"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.578333 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-l99t6_f38ce569-0a5c-408d-9d44-bb953d38e24e/openstack-network-exporter/0.log"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.578388 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-l99t6" event={"ID":"f38ce569-0a5c-408d-9d44-bb953d38e24e","Type":"ContainerDied","Data":"9d1c4d9bc8e983d7d6b4a5c87332d88666e7997a6545ad48697242920e341637"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.578404 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d1c4d9bc8e983d7d6b4a5c87332d88666e7997a6545ad48697242920e341637"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.580430 4757 generic.go:334] "Generic (PLEG): container finished" podID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerID="bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.580468 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-587f5f887c-jf8v5" event={"ID":"0aa7bf0a-b5c3-4db6-b401-e8512e1df933","Type":"ContainerDied","Data":"bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598"}
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.581622 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3 is running failed: container process not found" containerID="861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"]
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.581966 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3 is running failed: container process not found" containerID="861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.582108 4757 generic.go:334] "Generic (PLEG): container finished" podID="afc51d15-69dd-4900-886c-29a4f372df24" containerID="832893f98af9abff959fc064ee7dc85ca2245d5e8f5d1e911ffd6cf81dcf776b" exitCode=143
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.582152 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ff5468974-c5722" event={"ID":"afc51d15-69dd-4900-886c-29a4f372df24","Type":"ContainerDied","Data":"832893f98af9abff959fc064ee7dc85ca2245d5e8f5d1e911ffd6cf81dcf776b"}
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.582463 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3 is running failed: container process not found" containerID="861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3" cmd=["/usr/local/bin/container-scripts/ovn_controller_readiness.sh"]
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.582487 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-kn7bx" podUID="0efda247-fa18-49db-a37d-1dd28d999ed7" containerName="ovn-controller"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.584562 4757 generic.go:334] "Generic (PLEG): container finished" podID="0efda247-fa18-49db-a37d-1dd28d999ed7" containerID="861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.584618 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kn7bx" event={"ID":"0efda247-fa18-49db-a37d-1dd28d999ed7","Type":"ContainerDied","Data":"861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.587303 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_47e3132d-b23b-47f2-b26e-5511df70deec/ovsdbserver-nb/0.log"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.587357 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"47e3132d-b23b-47f2-b26e-5511df70deec","Type":"ContainerDied","Data":"6e0a3c3b86d81999a73e81e0a4b29a852d1c9a0311760b3a102b1cb0c1809da7"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.587378 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e0a3c3b86d81999a73e81e0a4b29a852d1c9a0311760b3a102b1cb0c1809da7"
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.597376 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.599039 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.601108 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.601531 4757 generic.go:334] "Generic (PLEG): container finished" podID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerID="0324863c9c638456508e30d35fe0f9846118f58caedb60fbed5e6e14f91c6b11" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.601555 4757 generic.go:334] "Generic (PLEG): container finished" podID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerID="28e913a804b505e4f014857f7b6a17fb5e76553cd58c85ad5dd061ab65fa664b" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.601598 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5a2f1fd8-9191-40cd-9edb-6a681214aaa9","Type":"ContainerDied","Data":"0324863c9c638456508e30d35fe0f9846118f58caedb60fbed5e6e14f91c6b11"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.601623 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5a2f1fd8-9191-40cd-9edb-6a681214aaa9","Type":"ContainerDied","Data":"28e913a804b505e4f014857f7b6a17fb5e76553cd58c85ad5dd061ab65fa664b"}
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.601646 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.601666 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server"
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.605776 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.608825 4757 generic.go:334] "Generic (PLEG): container finished" podID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerID="0cb8bd1a995fc5d7d87c203ebb692a3ce29bbd8e20024032b2050c0645baebc6" exitCode=143
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.608891 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4da5a9db-df84-4b71-b566-7c723fd7eb65","Type":"ContainerDied","Data":"0cb8bd1a995fc5d7d87c203ebb692a3ce29bbd8e20024032b2050c0645baebc6"}
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.608951 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:21 crc kubenswrapper[4757]: E1006 14:00:21.608977 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.610537 4757 generic.go:334] "Generic (PLEG): container finished" podID="f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" containerID="96c3bb713c662f2ed04a7bb1393bbac6037596a56af39675d70856aba417eb71" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.610585 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" event={"ID":"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c","Type":"ContainerDied","Data":"96c3bb713c662f2ed04a7bb1393bbac6037596a56af39675d70856aba417eb71"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.611965 4757 generic.go:334] "Generic (PLEG): container finished" podID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerID="ba1f45ef3932bf918c1b59fa1ed6629efa95b65ac36b643ce4bf6f517905a218" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.612006 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7c7848899c-8bm7g" event={"ID":"ec6e1479-3b96-4c4b-be95-5834172d37ff","Type":"ContainerDied","Data":"ba1f45ef3932bf918c1b59fa1ed6629efa95b65ac36b643ce4bf6f517905a218"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.620134 4757 generic.go:334] "Generic (PLEG): container finished" podID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerID="7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab" exitCode=143
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.620194 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c84ddadb-263d-4a4a-bc3f-b645c449e392","Type":"ContainerDied","Data":"7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.634193 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" containerName="rabbitmq" containerID="cri-o://a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b" gracePeriod=604800
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638367 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638395 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638402 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638409 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638415 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638422 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638428 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638435 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638442 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638448 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638454 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638460 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638467 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638473 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638514 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638539 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638548 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638557 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638565 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638574 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638583 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638592 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638602 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638611 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638620 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638629 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638641 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.638650 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.643313 4757 generic.go:334] "Generic (PLEG): container finished" podID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" exitCode=0
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.643396 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fqwwx" event={"ID":"3482c1c4-b15b-46cb-a897-3528fa22adda","Type":"ContainerDied","Data":"21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.645988 4757 generic.go:334] "Generic (PLEG): container finished" podID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerID="17ce35871062aaa7a1aa4139c7f03addfd9e5a2514de88d4e098daa173fc41a8" exitCode=143
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.646039 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78a4414b-7eec-457f-b08c-aeb719ffc320","Type":"ContainerDied","Data":"17ce35871062aaa7a1aa4139c7f03addfd9e5a2514de88d4e098daa173fc41a8"}
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.649917 4757 generic.go:334] "Generic (PLEG): container finished" podID="f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" containerID="60487956b45ab32166592c2b7eceedfd15fb90339151dc9e4ceaf70a848d1bb5" exitCode=137
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.667193 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.103:5671: connect: connection refused"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.830171 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_47e3132d-b23b-47f2-b26e-5511df70deec/ovsdbserver-nb/0.log"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.830750 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.892902 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-l99t6_f38ce569-0a5c-408d-9d44-bb953d38e24e/openstack-network-exporter/0.log"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.893156 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-l99t6"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.893798 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.900377 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.913816 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.923528 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.104:5671: connect: connection refused"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.943677 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-combined-ca-bundle\") pod \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.943724 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f38ce569-0a5c-408d-9d44-bb953d38e24e-config\") pod \"f38ce569-0a5c-408d-9d44-bb953d38e24e\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.943790 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config-secret\") pod \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.943837 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-scripts\") pod \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.943876 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config\") pod \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.943916 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-svc\") pod \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.943955 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-etc-machine-id\") pod \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.943984 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdbserver-nb-tls-certs\") pod \"47e3132d-b23b-47f2-b26e-5511df70deec\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944013 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-metrics-certs-tls-certs\") pod \"f38ce569-0a5c-408d-9d44-bb953d38e24e\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944040 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"47e3132d-b23b-47f2-b26e-5511df70deec\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944068 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data-custom\") pod \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944106 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-metrics-certs-tls-certs\") pod \"47e3132d-b23b-47f2-b26e-5511df70deec\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944128 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-combined-ca-bundle\") pod \"f38ce569-0a5c-408d-9d44-bb953d38e24e\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944152 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-combined-ca-bundle\") pod \"47e3132d-b23b-47f2-b26e-5511df70deec\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944171 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovn-rundir\") pod \"f38ce569-0a5c-408d-9d44-bb953d38e24e\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944191 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77flw\" (UniqueName: \"kubernetes.io/projected/f38ce569-0a5c-408d-9d44-bb953d38e24e-kube-api-access-77flw\") pod \"f38ce569-0a5c-408d-9d44-bb953d38e24e\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944211 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-nb\") pod \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944239 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-swift-storage-0\") pod \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944275 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-config\") pod \"47e3132d-b23b-47f2-b26e-5511df70deec\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944302 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-scripts\") pod \"47e3132d-b23b-47f2-b26e-5511df70deec\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944326 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data\") pod \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944354 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt4sk\" (UniqueName: \"kubernetes.io/projected/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-kube-api-access-gt4sk\") pod \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944375 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-combined-ca-bundle\") pod \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\" (UID: \"5a2f1fd8-9191-40cd-9edb-6a681214aaa9\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944402 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-sb\") pod \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944433 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kk2g\" (UniqueName: \"kubernetes.io/projected/47e3132d-b23b-47f2-b26e-5511df70deec-kube-api-access-8kk2g\") pod \"47e3132d-b23b-47f2-b26e-5511df70deec\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944470 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-config\") pod \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944494 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdb-rundir\") pod \"47e3132d-b23b-47f2-b26e-5511df70deec\" (UID: \"47e3132d-b23b-47f2-b26e-5511df70deec\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944513 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovs-rundir\") pod \"f38ce569-0a5c-408d-9d44-bb953d38e24e\" (UID: \"f38ce569-0a5c-408d-9d44-bb953d38e24e\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944542 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k555p\" (UniqueName: \"kubernetes.io/projected/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-kube-api-access-k555p\") pod \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\" (UID: \"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.944572 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tl8nl\" (UniqueName: \"kubernetes.io/projected/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-kube-api-access-tl8nl\") pod \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\" (UID: \"f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb\") "
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.953467 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "f38ce569-0a5c-408d-9d44-bb953d38e24e" (UID: "f38ce569-0a5c-408d-9d44-bb953d38e24e"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.963999 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kn7bx"
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.966328 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "f38ce569-0a5c-408d-9d44-bb953d38e24e" (UID: "f38ce569-0a5c-408d-9d44-bb953d38e24e"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.966781 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "47e3132d-b23b-47f2-b26e-5511df70deec" (UID: "47e3132d-b23b-47f2-b26e-5511df70deec"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.968356 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-config" (OuterVolumeSpecName: "config") pod "47e3132d-b23b-47f2-b26e-5511df70deec" (UID: "47e3132d-b23b-47f2-b26e-5511df70deec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.968649 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5a2f1fd8-9191-40cd-9edb-6a681214aaa9" (UID: "5a2f1fd8-9191-40cd-9edb-6a681214aaa9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.968897 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-kube-api-access-gt4sk" (OuterVolumeSpecName: "kube-api-access-gt4sk") pod "5a2f1fd8-9191-40cd-9edb-6a681214aaa9" (UID: "5a2f1fd8-9191-40cd-9edb-6a681214aaa9"). InnerVolumeSpecName "kube-api-access-gt4sk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.969653 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-scripts" (OuterVolumeSpecName: "scripts") pod "47e3132d-b23b-47f2-b26e-5511df70deec" (UID: "47e3132d-b23b-47f2-b26e-5511df70deec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.970296 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f38ce569-0a5c-408d-9d44-bb953d38e24e-config" (OuterVolumeSpecName: "config") pod "f38ce569-0a5c-408d-9d44-bb953d38e24e" (UID: "f38ce569-0a5c-408d-9d44-bb953d38e24e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.973294 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-kube-api-access-tl8nl" (OuterVolumeSpecName: "kube-api-access-tl8nl") pod "f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" (UID: "f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb"). InnerVolumeSpecName "kube-api-access-tl8nl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.976309 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-kube-api-access-k555p" (OuterVolumeSpecName: "kube-api-access-k555p") pod "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" (UID: "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c"). InnerVolumeSpecName "kube-api-access-k555p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.981543 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-scripts" (OuterVolumeSpecName: "scripts") pod "5a2f1fd8-9191-40cd-9edb-6a681214aaa9" (UID: "5a2f1fd8-9191-40cd-9edb-6a681214aaa9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.984108 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "47e3132d-b23b-47f2-b26e-5511df70deec" (UID: "47e3132d-b23b-47f2-b26e-5511df70deec"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.989573 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47e3132d-b23b-47f2-b26e-5511df70deec-kube-api-access-8kk2g" (OuterVolumeSpecName: "kube-api-access-8kk2g") pod "47e3132d-b23b-47f2-b26e-5511df70deec" (UID: "47e3132d-b23b-47f2-b26e-5511df70deec"). InnerVolumeSpecName "kube-api-access-8kk2g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.989793 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5a2f1fd8-9191-40cd-9edb-6a681214aaa9" (UID: "5a2f1fd8-9191-40cd-9edb-6a681214aaa9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:21 crc kubenswrapper[4757]: I1006 14:00:21.999308 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f38ce569-0a5c-408d-9d44-bb953d38e24e-kube-api-access-77flw" (OuterVolumeSpecName: "kube-api-access-77flw") pod "f38ce569-0a5c-408d-9d44-bb953d38e24e" (UID: "f38ce569-0a5c-408d-9d44-bb953d38e24e"). InnerVolumeSpecName "kube-api-access-77flw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046049 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0efda247-fa18-49db-a37d-1dd28d999ed7-scripts\") pod \"0efda247-fa18-49db-a37d-1dd28d999ed7\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") "
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046157 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-log-ovn\") pod \"0efda247-fa18-49db-a37d-1dd28d999ed7\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") "
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046234 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run\") pod \"0efda247-fa18-49db-a37d-1dd28d999ed7\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") "
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046351 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-ovn-controller-tls-certs\") pod \"0efda247-fa18-49db-a37d-1dd28d999ed7\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") "
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046464 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bd5p\" (UniqueName: \"kubernetes.io/projected/0efda247-fa18-49db-a37d-1dd28d999ed7-kube-api-access-4bd5p\") pod \"0efda247-fa18-49db-a37d-1dd28d999ed7\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") "
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046442 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run" (OuterVolumeSpecName: "var-run") pod "0efda247-fa18-49db-a37d-1dd28d999ed7" (UID: "0efda247-fa18-49db-a37d-1dd28d999ed7"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046521 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "0efda247-fa18-49db-a37d-1dd28d999ed7" (UID: "0efda247-fa18-49db-a37d-1dd28d999ed7"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046493 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run-ovn\") pod \"0efda247-fa18-49db-a37d-1dd28d999ed7\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") "
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046784 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-combined-ca-bundle\") pod \"0efda247-fa18-49db-a37d-1dd28d999ed7\" (UID: \"0efda247-fa18-49db-a37d-1dd28d999ed7\") "
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.047268 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0efda247-fa18-49db-a37d-1dd28d999ed7-scripts" (OuterVolumeSpecName: "scripts") pod "0efda247-fa18-49db-a37d-1dd28d999ed7" (UID: "0efda247-fa18-49db-a37d-1dd28d999ed7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.046303 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "0efda247-fa18-49db-a37d-1dd28d999ed7" (UID: "0efda247-fa18-49db-a37d-1dd28d999ed7"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048117 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0efda247-fa18-49db-a37d-1dd28d999ed7-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048144 4757 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovn-rundir\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048157 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77flw\" (UniqueName: \"kubernetes.io/projected/f38ce569-0a5c-408d-9d44-bb953d38e24e-kube-api-access-77flw\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048170 4757 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-log-ovn\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048183 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-config\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048194 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/47e3132d-b23b-47f2-b26e-5511df70deec-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048205 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt4sk\" (UniqueName: \"kubernetes.io/projected/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-kube-api-access-gt4sk\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048217 4757 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048228 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kk2g\" (UniqueName: \"kubernetes.io/projected/47e3132d-b23b-47f2-b26e-5511df70deec-kube-api-access-8kk2g\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048240 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdb-rundir\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048252 4757 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f38ce569-0a5c-408d-9d44-bb953d38e24e-ovs-rundir\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048264 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k555p\" (UniqueName: \"kubernetes.io/projected/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-kube-api-access-k555p\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048275 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tl8nl\" (UniqueName: \"kubernetes.io/projected/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-kube-api-access-tl8nl\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048286 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f38ce569-0a5c-408d-9d44-bb953d38e24e-config\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048297 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048307 4757 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0efda247-fa18-49db-a37d-1dd28d999ed7-var-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048319 4757 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048350 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.048365 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.050193 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0efda247-fa18-49db-a37d-1dd28d999ed7-kube-api-access-4bd5p" (OuterVolumeSpecName: "kube-api-access-4bd5p") pod "0efda247-fa18-49db-a37d-1dd28d999ed7" (UID: "0efda247-fa18-49db-a37d-1dd28d999ed7"). InnerVolumeSpecName "kube-api-access-4bd5p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.069119 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" (UID: "f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.140515 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f38ce569-0a5c-408d-9d44-bb953d38e24e" (UID: "f38ce569-0a5c-408d-9d44-bb953d38e24e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.142189 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_780fa947-0a68-4231-b5b3-e0cad80204d2/ovsdbserver-sb/0.log"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.142253 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.156784 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.156819 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.156828 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bd5p\" (UniqueName: \"kubernetes.io/projected/0efda247-fa18-49db-a37d-1dd28d999ed7-kube-api-access-4bd5p\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.202865 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ced3006-a395-423b-8429-f35beb1398b0" path="/var/lib/kubelet/pods/1ced3006-a395-423b-8429-f35beb1398b0/volumes"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.214059 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.222533 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c17521-7f6c-4704-92c2-e075c6e35f5d" path="/var/lib/kubelet/pods/22c17521-7f6c-4704-92c2-e075c6e35f5d/volumes"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.223558 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48547398-bd84-4b69-b6a8-b3db5e7e32a9" path="/var/lib/kubelet/pods/48547398-bd84-4b69-b6a8-b3db5e7e32a9/volumes"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.224247 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68bdda02-206a-460a-b6a9-7ab492a1f518" path="/var/lib/kubelet/pods/68bdda02-206a-460a-b6a9-7ab492a1f518/volumes"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.227693 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-587f5f887c-jf8v5" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.160:9696/\": dial tcp 10.217.0.160:9696: connect: connection refused"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.228073 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86a61f37-c383-4faf-b0b1-0abfd8866683" path="/var/lib/kubelet/pods/86a61f37-c383-4faf-b0b1-0abfd8866683/volumes"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.228855 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91433fac-000a-4c4c-bfc4-7f2e7c762483" path="/var/lib/kubelet/pods/91433fac-000a-4c4c-bfc4-7f2e7c762483/volumes"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.229408 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0a0bfd7-a0ed-468e-9226-ae8f99e7b457" path="/var/lib/kubelet/pods/a0a0bfd7-a0ed-468e-9226-ae8f99e7b457/volumes"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.234504 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5cfcfe4-1c22-4459-a88c-026da067c650" path="/var/lib/kubelet/pods/a5cfcfe4-1c22-4459-a88c-026da067c650/volumes"
Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.235116 4757
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b257515d-a807-4a28-8b28-0e6390aa9d42" path="/var/lib/kubelet/pods/b257515d-a807-4a28-8b28-0e6390aa9d42/volumes" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.235751 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2681f2d-ee52-4508-81f8-4e20289fb03d" path="/var/lib/kubelet/pods/c2681f2d-ee52-4508-81f8-4e20289fb03d/volumes" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.236268 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6437e0a-2b4e-4f14-b7b8-573d464fad02" path="/var/lib/kubelet/pods/c6437e0a-2b4e-4f14-b7b8-573d464fad02/volumes" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.237195 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d11e5852-b82d-4633-9102-b3153ff76ab8" path="/var/lib/kubelet/pods/d11e5852-b82d-4633-9102-b3153ff76ab8/volumes" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.238031 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed46f68f-a023-405d-8b70-4195b82ed6eb" path="/var/lib/kubelet/pods/ed46f68f-a023-405d-8b70-4195b82ed6eb/volumes" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.238849 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f705f558-519e-489e-8ff4-5b3eb4476eff" path="/var/lib/kubelet/pods/f705f558-519e-489e-8ff4-5b3eb4476eff/volumes" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.241086 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcfaae90-f485-480d-9736-cbdbbb0e4ffa" path="/var/lib/kubelet/pods/fcfaae90-f485-480d-9736-cbdbbb0e4ffa/volumes" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.242088 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe9e11b6-6610-4dc3-a5fc-94142df40ee3" path="/var/lib/kubelet/pods/fe9e11b6-6610-4dc3-a5fc-94142df40ee3/volumes" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.259951 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96jg5\" (UniqueName: \"kubernetes.io/projected/780fa947-0a68-4231-b5b3-e0cad80204d2-kube-api-access-96jg5\") pod \"780fa947-0a68-4231-b5b3-e0cad80204d2\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.260072 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"780fa947-0a68-4231-b5b3-e0cad80204d2\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.260282 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-combined-ca-bundle\") pod \"780fa947-0a68-4231-b5b3-e0cad80204d2\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.260408 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-scripts\") pod \"780fa947-0a68-4231-b5b3-e0cad80204d2\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.260511 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdb-rundir\") pod \"780fa947-0a68-4231-b5b3-e0cad80204d2\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.260630 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-metrics-certs-tls-certs\") pod \"780fa947-0a68-4231-b5b3-e0cad80204d2\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.260745 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdbserver-sb-tls-certs\") pod \"780fa947-0a68-4231-b5b3-e0cad80204d2\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.260865 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-config\") pod \"780fa947-0a68-4231-b5b3-e0cad80204d2\" (UID: \"780fa947-0a68-4231-b5b3-e0cad80204d2\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.261984 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-scripts" (OuterVolumeSpecName: "scripts") pod "780fa947-0a68-4231-b5b3-e0cad80204d2" (UID: "780fa947-0a68-4231-b5b3-e0cad80204d2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.262941 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "780fa947-0a68-4231-b5b3-e0cad80204d2" (UID: "780fa947-0a68-4231-b5b3-e0cad80204d2"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.263194 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.263505 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.263382 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-config" (OuterVolumeSpecName: "config") pod "780fa947-0a68-4231-b5b3-e0cad80204d2" (UID: "780fa947-0a68-4231-b5b3-e0cad80204d2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.292472 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron2bb5-account-delete-r8kww"] Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.292785 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "780fa947-0a68-4231-b5b3-e0cad80204d2" (UID: "780fa947-0a68-4231-b5b3-e0cad80204d2"). 
InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.303471 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapid1d1-account-delete-j7wzz"] Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.307045 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/780fa947-0a68-4231-b5b3-e0cad80204d2-kube-api-access-96jg5" (OuterVolumeSpecName: "kube-api-access-96jg5") pod "780fa947-0a68-4231-b5b3-e0cad80204d2" (UID: "780fa947-0a68-4231-b5b3-e0cad80204d2"). InnerVolumeSpecName "kube-api-access-96jg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: E1006 14:00:22.324907 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 06 14:00:22 crc kubenswrapper[4757]: E1006 14:00:22.326011 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 06 14:00:22 crc kubenswrapper[4757]: E1006 14:00:22.334677 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 06 14:00:22 crc kubenswrapper[4757]: E1006 14:00:22.334746 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="19ac04ce-d95a-49ab-8eb2-eaf505990a53" containerName="nova-scheduler-scheduler" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.339820 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" (UID: "f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.365378 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/780fa947-0a68-4231-b5b3-e0cad80204d2-config\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.365483 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96jg5\" (UniqueName: \"kubernetes.io/projected/780fa947-0a68-4231-b5b3-e0cad80204d2-kube-api-access-96jg5\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.365538 4757 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.365618 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.365680 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.401076 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0efda247-fa18-49db-a37d-1dd28d999ed7" (UID: "0efda247-fa18-49db-a37d-1dd28d999ed7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.410841 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" (UID: "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.446259 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell00cee-account-delete-2rcjp"] Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.451714 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47e3132d-b23b-47f2-b26e-5511df70deec" (UID: "47e3132d-b23b-47f2-b26e-5511df70deec"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.453693 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican39b0-account-delete-gptgt"] Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.468383 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.468422 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.468435 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.476791 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" (UID: "f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: W1006 14:00:22.478956 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76f4e4cc_eccd_4c44_a39a_a75c06383d92.slice/crio-7b5efd228eae4b6a01a28f5817ab8ab8af6f60b4f6db2dd3b93df8009bae97e1 WatchSource:0}: Error finding container 7b5efd228eae4b6a01a28f5817ab8ab8af6f60b4f6db2dd3b93df8009bae97e1: Status 404 returned error can't find the container with id 7b5efd228eae4b6a01a28f5817ab8ab8af6f60b4f6db2dd3b93df8009bae97e1 Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.480623 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "780fa947-0a68-4231-b5b3-e0cad80204d2" (UID: "780fa947-0a68-4231-b5b3-e0cad80204d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.489293 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" (UID: "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.493393 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 06 14:00:22 crc kubenswrapper[4757]: W1006 14:00:22.498996 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7904f7d1_2332_4402_bd0b_4a40f5be43f9.slice/crio-085e26901b7a5a97a82c0c1a166bdfc809aef264b9df75d2486547fd26e622ca WatchSource:0}: Error finding container 085e26901b7a5a97a82c0c1a166bdfc809aef264b9df75d2486547fd26e622ca: Status 404 returned error can't find the container with id 085e26901b7a5a97a82c0c1a166bdfc809aef264b9df75d2486547fd26e622ca Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.517528 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a2f1fd8-9191-40cd-9edb-6a681214aaa9" (UID: "5a2f1fd8-9191-40cd-9edb-6a681214aaa9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.560064 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-config" (OuterVolumeSpecName: "config") pod "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" (UID: "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.574436 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-config\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.574477 4757 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.574492 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.574503 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.574513 4757 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.574521 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.582458 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") 
pod "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" (UID: "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.595454 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "47e3132d-b23b-47f2-b26e-5511df70deec" (UID: "47e3132d-b23b-47f2-b26e-5511df70deec"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.619229 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" (UID: "f8e78f45-d6b0-44d0-b84f-30ac3538ec3c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.627982 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "0efda247-fa18-49db-a37d-1dd28d999ed7" (UID: "0efda247-fa18-49db-a37d-1dd28d999ed7"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.638483 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data" (OuterVolumeSpecName: "config-data") pod "5a2f1fd8-9191-40cd-9edb-6a681214aaa9" (UID: "5a2f1fd8-9191-40cd-9edb-6a681214aaa9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.666313 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "47e3132d-b23b-47f2-b26e-5511df70deec" (UID: "47e3132d-b23b-47f2-b26e-5511df70deec"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.671474 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "f38ce569-0a5c-408d-9d44-bb953d38e24e" (UID: "f38ce569-0a5c-408d-9d44-bb953d38e24e"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.682486 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.681560 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5785d8b947-nsbsm" event={"ID":"f8e78f45-d6b0-44d0-b84f-30ac3538ec3c","Type":"ContainerDied","Data":"9ad49b788aaf023754763ccf3c18a79bdea0283d4edaa1a79c20b4d29e7921ef"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.685541 4757 scope.go:117] "RemoveContainer" containerID="96c3bb713c662f2ed04a7bb1393bbac6037596a56af39675d70856aba417eb71" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.685965 4757 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/0efda247-fa18-49db-a37d-1dd28d999ed7-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.686006 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.686017 4757 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38ce569-0a5c-408d-9d44-bb953d38e24e-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.686027 4757 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e3132d-b23b-47f2-b26e-5511df70deec-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.686035 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.686044 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a2f1fd8-9191-40cd-9edb-6a681214aaa9-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.686052 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.687919 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "780fa947-0a68-4231-b5b3-e0cad80204d2" (UID: "780fa947-0a68-4231-b5b3-e0cad80204d2"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.688628 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="a274a347-259a-4919-8326-8047df9b0de8" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.167:8776/healthcheck\": read tcp 10.217.0.2:56540->10.217.0.167:8776: read: connection reset by peer" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.700623 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-kn7bx" event={"ID":"0efda247-fa18-49db-a37d-1dd28d999ed7","Type":"ContainerDied","Data":"143cf145148e00a2089644312e7abc33d0e12ff33663c6a61f3f0d83eace14ab"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.700661 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-kn7bx" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.713678 4757 generic.go:334] "Generic (PLEG): container finished" podID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerID="b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988" exitCode=143 Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.713812 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-996ccd7c9-wv7n4" event={"ID":"3e6333c1-01c0-42fd-a75f-31a2c57e9db2","Type":"ContainerDied","Data":"b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.720449 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5a2f1fd8-9191-40cd-9edb-6a681214aaa9","Type":"ContainerDied","Data":"dd2060be9b6e0c61009bca07a311cf253468db4ae06c7b23b09a8177a5d40b53"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.720535 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.723879 4757 generic.go:334] "Generic (PLEG): container finished" podID="61d68e86-89f3-4dc6-bb42-7286c789fbba" containerID="1e20cdc4c0ab7fbe34675da2bd3b70c163eadea2e1127276d5eac3f283996f8c" exitCode=0 Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.723954 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"61d68e86-89f3-4dc6-bb42-7286c789fbba","Type":"ContainerDied","Data":"1e20cdc4c0ab7fbe34675da2bd3b70c163eadea2e1127276d5eac3f283996f8c"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.723995 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"61d68e86-89f3-4dc6-bb42-7286c789fbba","Type":"ContainerDied","Data":"5d5fcb749d2b4c58c4bdece98c3ba1ea71d7b4c44b5f0b2ca7ac765c7da66b28"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.724009 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d5fcb749d2b4c58c4bdece98c3ba1ea71d7b4c44b5f0b2ca7ac765c7da66b28" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.726317 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapid1d1-account-delete-j7wzz" event={"ID":"24ed1753-25de-4bde-8158-52cb7dd6a2f1","Type":"ContainerStarted","Data":"7635fb16ef8f1e32964fecb1d97c5ab0e2ed1ae89d068c9b38c0ab1825343220"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.735765 4757 generic.go:334] "Generic (PLEG): container finished" podID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerID="8657327aee1067ffca05e4e9c577e2dddc6b63823ba1e0a17d6ec6a140fc3cc0" exitCode=143 Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.735826 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" event={"ID":"985540de-3212-41f4-a3a6-180ff5c4eda2","Type":"ContainerDied","Data":"8657327aee1067ffca05e4e9c577e2dddc6b63823ba1e0a17d6ec6a140fc3cc0"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.746584 4757 generic.go:334] "Generic (PLEG): container finished" podID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerID="92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2" exitCode=143 Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.746645 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d5595b-89h8z" event={"ID":"ac42434c-8367-4cf2-9134-2d85444f90f4","Type":"ContainerDied","Data":"92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.751253 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "780fa947-0a68-4231-b5b3-e0cad80204d2" (UID: "780fa947-0a68-4231-b5b3-e0cad80204d2"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.753197 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron2bb5-account-delete-r8kww" event={"ID":"dffbfc0a-c4ec-41cc-873b-552bc6b7fa69","Type":"ContainerStarted","Data":"37eaf9ca56a0cec49c024bca928dd0c3ae217f772bcb928eb42a803e3aee9250"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.767956 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.780699 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_780fa947-0a68-4231-b5b3-e0cad80204d2/ovsdbserver-sb/0.log" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.780809 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.781745 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"780fa947-0a68-4231-b5b3-e0cad80204d2","Type":"ContainerDied","Data":"b6abf6024cef72c54fb94eb234ce9a63c024a2e35ede681a39f9c72cfba3d310"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.783972 4757 scope.go:117] "RemoveContainer" containerID="2bb3cf19fc535ce8b640e13e4f2624e106cf69b9da5982d604eed9ab677dace2" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.789168 4757 generic.go:334] "Generic (PLEG): container finished" podID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerID="14022a75e2f882a054a2a8082d11133582e6585d36ef89867d94fb837bf54886" exitCode=0 Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.795150 4757 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.801175 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/780fa947-0a68-4231-b5b3-e0cad80204d2-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.800392 4757 generic.go:334] "Generic (PLEG): container finished" podID="39ef4372-3b20-44b5-b441-85f963e6a25a" containerID="c6ba27d47c99f13d015277533c3fba4500d832d05fa4632f9ba24c71c9d9ec8e" exitCode=0 Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.797212 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7c7848899c-8bm7g" event={"ID":"ec6e1479-3b96-4c4b-be95-5834172d37ff","Type":"ContainerDied","Data":"14022a75e2f882a054a2a8082d11133582e6585d36ef89867d94fb837bf54886"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.801523 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7c7848899c-8bm7g" event={"ID":"ec6e1479-3b96-4c4b-be95-5834172d37ff","Type":"ContainerDied","Data":"e883a3dae22fdf60514c219a9283e92bb5abf320e9d17268731d006765c50594"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.801558 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e883a3dae22fdf60514c219a9283e92bb5abf320e9d17268731d006765c50594" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.801571 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"39ef4372-3b20-44b5-b441-85f963e6a25a","Type":"ContainerDied","Data":"c6ba27d47c99f13d015277533c3fba4500d832d05fa4632f9ba24c71c9d9ec8e"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.807557 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell00cee-account-delete-2rcjp" event={"ID":"7904f7d1-2332-4402-bd0b-4a40f5be43f9","Type":"ContainerStarted","Data":"085e26901b7a5a97a82c0c1a166bdfc809aef264b9df75d2486547fd26e622ca"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.812978 4757 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-l99t6" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.813230 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican39b0-account-delete-gptgt" event={"ID":"76f4e4cc-eccd-4c44-a39a-a75c06383d92","Type":"ContainerStarted","Data":"7b5efd228eae4b6a01a28f5817ab8ab8af6f60b4f6db2dd3b93df8009bae97e1"} Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.813344 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.829526 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.854105 4757 scope.go:117] "RemoveContainer" containerID="861e4848c2d8eae37a6ca300adccd71f39a8a6cf0249ed0fa7568d6ab9d744b3" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.901921 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkv59\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-kube-api-access-lkv59\") pod \"ec6e1479-3b96-4c4b-be95-5834172d37ff\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.902001 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-combined-ca-bundle\") pod \"ec6e1479-3b96-4c4b-be95-5834172d37ff\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.902057 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-run-httpd\") pod \"ec6e1479-3b96-4c4b-be95-5834172d37ff\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.902708 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ec6e1479-3b96-4c4b-be95-5834172d37ff" (UID: "ec6e1479-3b96-4c4b-be95-5834172d37ff"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.902915 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-config-data\") pod \"ec6e1479-3b96-4c4b-be95-5834172d37ff\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.903341 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-etc-swift\") pod \"ec6e1479-3b96-4c4b-be95-5834172d37ff\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.903374 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-public-tls-certs\") pod \"ec6e1479-3b96-4c4b-be95-5834172d37ff\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.903413 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-internal-tls-certs\") pod \"ec6e1479-3b96-4c4b-be95-5834172d37ff\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.903541 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-log-httpd\") pod \"ec6e1479-3b96-4c4b-be95-5834172d37ff\" (UID: \"ec6e1479-3b96-4c4b-be95-5834172d37ff\") " Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.905153 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ec6e1479-3b96-4c4b-be95-5834172d37ff" (UID: "ec6e1479-3b96-4c4b-be95-5834172d37ff"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.905212 4757 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.909381 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-kube-api-access-lkv59" (OuterVolumeSpecName: "kube-api-access-lkv59") pod "ec6e1479-3b96-4c4b-be95-5834172d37ff" (UID: "ec6e1479-3b96-4c4b-be95-5834172d37ff"). InnerVolumeSpecName "kube-api-access-lkv59". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.909445 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "ec6e1479-3b96-4c4b-be95-5834172d37ff" (UID: "ec6e1479-3b96-4c4b-be95-5834172d37ff"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.941474 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 06 14:00:22 crc kubenswrapper[4757]: I1006 14:00:22.995464 4757 scope.go:117] "RemoveContainer" containerID="0324863c9c638456508e30d35fe0f9846118f58caedb60fbed5e6e14f91c6b11" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.000302 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5785d8b947-nsbsm"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.007019 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jctd6\" (UniqueName: \"kubernetes.io/projected/61d68e86-89f3-4dc6-bb42-7286c789fbba-kube-api-access-jctd6\") pod \"61d68e86-89f3-4dc6-bb42-7286c789fbba\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.009334 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-vencrypt-tls-certs\") pod \"61d68e86-89f3-4dc6-bb42-7286c789fbba\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.009496 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-config-data\") pod \"61d68e86-89f3-4dc6-bb42-7286c789fbba\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.009525 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-combined-ca-bundle\") pod \"61d68e86-89f3-4dc6-bb42-7286c789fbba\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.009547 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-nova-novncproxy-tls-certs\") pod \"61d68e86-89f3-4dc6-bb42-7286c789fbba\" (UID: \"61d68e86-89f3-4dc6-bb42-7286c789fbba\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.010432 4757 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.010457 4757 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ec6e1479-3b96-4c4b-be95-5834172d37ff-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.010467 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkv59\" (UniqueName: \"kubernetes.io/projected/ec6e1479-3b96-4c4b-be95-5834172d37ff-kube-api-access-lkv59\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.010538 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.010582 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data podName:0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7 nodeName:}" failed. 
No retries permitted until 2025-10-06 14:00:27.010569171 +0000 UTC m=+1315.507887708 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data") pod "rabbitmq-server-0" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7") : configmap "rabbitmq-config-data" not found Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.015564 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d68e86-89f3-4dc6-bb42-7286c789fbba-kube-api-access-jctd6" (OuterVolumeSpecName: "kube-api-access-jctd6") pod "61d68e86-89f3-4dc6-bb42-7286c789fbba" (UID: "61d68e86-89f3-4dc6-bb42-7286c789fbba"). InnerVolumeSpecName "kube-api-access-jctd6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.017230 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5785d8b947-nsbsm"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.020485 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.020527 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ec6e1479-3b96-4c4b-be95-5834172d37ff" (UID: "ec6e1479-3b96-4c4b-be95-5834172d37ff"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.033307 4757 scope.go:117] "RemoveContainer" containerID="28e913a804b505e4f014857f7b6a17fb5e76553cd58c85ad5dd061ab65fa664b" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.043940 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-config-data" (OuterVolumeSpecName: "config-data") pod "ec6e1479-3b96-4c4b-be95-5834172d37ff" (UID: "ec6e1479-3b96-4c4b-be95-5834172d37ff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.044327 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-kn7bx"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.072894 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-kn7bx"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.080937 4757 scope.go:117] "RemoveContainer" containerID="60487956b45ab32166592c2b7eceedfd15fb90339151dc9e4ceaf70a848d1bb5" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.110558 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.111426 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-combined-ca-bundle\") pod \"39ef4372-3b20-44b5-b441-85f963e6a25a\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.111497 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-kolla-config\") pod \"39ef4372-3b20-44b5-b441-85f963e6a25a\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.111520 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-secrets\") pod \"39ef4372-3b20-44b5-b441-85f963e6a25a\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.111855 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-galera-tls-certs\") pod \"39ef4372-3b20-44b5-b441-85f963e6a25a\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.111899 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"39ef4372-3b20-44b5-b441-85f963e6a25a\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.111991 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-operator-scripts\") pod \"39ef4372-3b20-44b5-b441-85f963e6a25a\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.112023 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-generated\") pod \"39ef4372-3b20-44b5-b441-85f963e6a25a\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.112072 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-default\") pod \"39ef4372-3b20-44b5-b441-85f963e6a25a\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 
14:00:23.112119 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2w4w\" (UniqueName: \"kubernetes.io/projected/39ef4372-3b20-44b5-b441-85f963e6a25a-kube-api-access-j2w4w\") pod \"39ef4372-3b20-44b5-b441-85f963e6a25a\" (UID: \"39ef4372-3b20-44b5-b441-85f963e6a25a\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.113425 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jctd6\" (UniqueName: \"kubernetes.io/projected/61d68e86-89f3-4dc6-bb42-7286c789fbba-kube-api-access-jctd6\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.113453 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.113466 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.114470 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "39ef4372-3b20-44b5-b441-85f963e6a25a" (UID: "39ef4372-3b20-44b5-b441-85f963e6a25a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.116068 4757 scope.go:117] "RemoveContainer" containerID="a12b1909659d9dffd84421e8aa816dc9c5582d96e48bf6ee24c272907e439897" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.116661 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "39ef4372-3b20-44b5-b441-85f963e6a25a" (UID: "39ef4372-3b20-44b5-b441-85f963e6a25a"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.117675 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-secrets" (OuterVolumeSpecName: "secrets") pod "39ef4372-3b20-44b5-b441-85f963e6a25a" (UID: "39ef4372-3b20-44b5-b441-85f963e6a25a"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.119574 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39ef4372-3b20-44b5-b441-85f963e6a25a-kube-api-access-j2w4w" (OuterVolumeSpecName: "kube-api-access-j2w4w") pod "39ef4372-3b20-44b5-b441-85f963e6a25a" (UID: "39ef4372-3b20-44b5-b441-85f963e6a25a"). InnerVolumeSpecName "kube-api-access-j2w4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.120464 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "39ef4372-3b20-44b5-b441-85f963e6a25a" (UID: "39ef4372-3b20-44b5-b441-85f963e6a25a"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.123475 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "39ef4372-3b20-44b5-b441-85f963e6a25a" (UID: "39ef4372-3b20-44b5-b441-85f963e6a25a"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.137864 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.148728 4757 scope.go:117] "RemoveContainer" containerID="7f6ef039c9a56166775bfea66933d67e543e08d8ece459c480259808a1dad4e8" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.155575 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.166028 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.173601 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.178055 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "mysql-db") pod "39ef4372-3b20-44b5-b441-85f963e6a25a" (UID: "39ef4372-3b20-44b5-b441-85f963e6a25a"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.185137 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.191399 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.200140 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-l99t6"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.204354 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-l99t6"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.214986 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-scripts\") pod \"a274a347-259a-4919-8326-8047df9b0de8\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215082 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data\") pod \"a274a347-259a-4919-8326-8047df9b0de8\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215166 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-internal-tls-certs\") pod \"a274a347-259a-4919-8326-8047df9b0de8\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215219 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"kube-api-access-8f4hk\" (UniqueName: \"kubernetes.io/projected/a274a347-259a-4919-8326-8047df9b0de8-kube-api-access-8f4hk\") pod \"a274a347-259a-4919-8326-8047df9b0de8\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215271 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data-custom\") pod \"a274a347-259a-4919-8326-8047df9b0de8\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215295 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-public-tls-certs\") pod \"a274a347-259a-4919-8326-8047df9b0de8\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215354 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a274a347-259a-4919-8326-8047df9b0de8-logs\") pod \"a274a347-259a-4919-8326-8047df9b0de8\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215383 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-combined-ca-bundle\") pod \"a274a347-259a-4919-8326-8047df9b0de8\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215466 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a274a347-259a-4919-8326-8047df9b0de8-etc-machine-id\") pod \"a274a347-259a-4919-8326-8047df9b0de8\" (UID: \"a274a347-259a-4919-8326-8047df9b0de8\") " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215936 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215956 4757 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-operator-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215967 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-generated\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215977 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-config-data-default\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215987 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2w4w\" (UniqueName: \"kubernetes.io/projected/39ef4372-3b20-44b5-b441-85f963e6a25a-kube-api-access-j2w4w\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.215996 4757 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/39ef4372-3b20-44b5-b441-85f963e6a25a-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.216006 4757 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-secrets\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.216590 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a274a347-259a-4919-8326-8047df9b0de8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a274a347-259a-4919-8326-8047df9b0de8" (UID: "a274a347-259a-4919-8326-8047df9b0de8"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.218019 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a274a347-259a-4919-8326-8047df9b0de8-logs" (OuterVolumeSpecName: "logs") pod "a274a347-259a-4919-8326-8047df9b0de8" (UID: "a274a347-259a-4919-8326-8047df9b0de8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.272743 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.273159 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="proxy-httpd" containerID="cri-o://85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080" gracePeriod=30 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.273233 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="ceilometer-notification-agent" containerID="cri-o://bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66" gracePeriod=30 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.273182 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="sg-core" containerID="cri-o://d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9" gracePeriod=30 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.274433 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="ceilometer-central-agent" containerID="cri-o://d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb" gracePeriod=30 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.275116 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a274a347-259a-4919-8326-8047df9b0de8-kube-api-access-8f4hk" (OuterVolumeSpecName: "kube-api-access-8f4hk") pod "a274a347-259a-4919-8326-8047df9b0de8" (UID: "a274a347-259a-4919-8326-8047df9b0de8"). InnerVolumeSpecName "kube-api-access-8f4hk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.322490 4757 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a274a347-259a-4919-8326-8047df9b0de8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.322519 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8f4hk\" (UniqueName: \"kubernetes.io/projected/a274a347-259a-4919-8326-8047df9b0de8-kube-api-access-8f4hk\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.322529 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a274a347-259a-4919-8326-8047df9b0de8-logs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.323219 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.323415 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="c5b34617-b163-4b7a-9950-53f64a8cca2c" containerName="kube-state-metrics" containerID="cri-o://947f0a3b15f660d4a027bf43a3fba73c9916d632976aead40013fc2ec14335b9" gracePeriod=30 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.323734 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-scripts" (OuterVolumeSpecName: "scripts") pod "a274a347-259a-4919-8326-8047df9b0de8" (UID: "a274a347-259a-4919-8326-8047df9b0de8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.324320 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a274a347-259a-4919-8326-8047df9b0de8" (UID: "a274a347-259a-4919-8326-8047df9b0de8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.368921 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.369118 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="a21e8c5a-5819-4e8c-9b20-5353625fc36b" containerName="memcached" containerID="cri-o://ff662bc600af80633dbf9ed1780d1a31e3db1b29ac07bbca32eb0ee8886ef72f" gracePeriod=30 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.392694 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec6e1479-3b96-4c4b-be95-5834172d37ff" (UID: "ec6e1479-3b96-4c4b-be95-5834172d37ff"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.392581 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.407154 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "61d68e86-89f3-4dc6-bb42-7286c789fbba" (UID: "61d68e86-89f3-4dc6-bb42-7286c789fbba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.410697 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.428742 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.429049 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.429060 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.429070 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.433366 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.433591 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="924963ee-1194-4d98-84d7-9bb3e426f7bc" containerName="nova-cell0-conductor-conductor" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.441290 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-ms6bn"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.454167 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-v77kw"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.466801 4757 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/keystone-db-sync-ms6bn"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.472333 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-v77kw"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487210 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone2941-account-delete-kjhx5"] Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487609 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" containerName="ovsdbserver-nb" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487628 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" containerName="ovsdbserver-nb" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487645 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d68e86-89f3-4dc6-bb42-7286c789fbba" containerName="nova-cell1-novncproxy-novncproxy" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487654 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d68e86-89f3-4dc6-bb42-7286c789fbba" containerName="nova-cell1-novncproxy-novncproxy" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487664 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a274a347-259a-4919-8326-8047df9b0de8" containerName="cinder-api-log" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487670 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a274a347-259a-4919-8326-8047df9b0de8" containerName="cinder-api-log" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487687 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" containerName="dnsmasq-dns" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487692 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" containerName="dnsmasq-dns" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487703 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerName="proxy-server" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487709 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerName="proxy-server" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487725 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerName="proxy-httpd" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487732 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerName="proxy-httpd" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487745 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f38ce569-0a5c-408d-9d44-bb953d38e24e" containerName="openstack-network-exporter" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487751 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f38ce569-0a5c-408d-9d44-bb953d38e24e" containerName="openstack-network-exporter" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487760 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39ef4372-3b20-44b5-b441-85f963e6a25a" containerName="galera" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487766 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="39ef4372-3b20-44b5-b441-85f963e6a25a" 
containerName="galera" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487774 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39ef4372-3b20-44b5-b441-85f963e6a25a" containerName="mysql-bootstrap" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487780 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="39ef4372-3b20-44b5-b441-85f963e6a25a" containerName="mysql-bootstrap" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487787 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="780fa947-0a68-4231-b5b3-e0cad80204d2" containerName="ovsdbserver-sb" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487793 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="780fa947-0a68-4231-b5b3-e0cad80204d2" containerName="ovsdbserver-sb" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487802 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0efda247-fa18-49db-a37d-1dd28d999ed7" containerName="ovn-controller" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487807 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0efda247-fa18-49db-a37d-1dd28d999ed7" containerName="ovn-controller" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487819 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" containerName="init" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487825 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" containerName="init" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487832 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerName="cinder-scheduler" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487838 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerName="cinder-scheduler" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487849 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a274a347-259a-4919-8326-8047df9b0de8" containerName="cinder-api" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487863 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a274a347-259a-4919-8326-8047df9b0de8" containerName="cinder-api" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487872 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerName="probe" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487877 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerName="probe" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487885 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" containerName="openstack-network-exporter" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487891 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" containerName="openstack-network-exporter" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.487901 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="780fa947-0a68-4231-b5b3-e0cad80204d2" containerName="openstack-network-exporter" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.487906 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="780fa947-0a68-4231-b5b3-e0cad80204d2" 
containerName="openstack-network-exporter" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488063 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerName="proxy-httpd" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488074 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="780fa947-0a68-4231-b5b3-e0cad80204d2" containerName="ovsdbserver-sb" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488086 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a274a347-259a-4919-8326-8047df9b0de8" containerName="cinder-api" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488111 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="0efda247-fa18-49db-a37d-1dd28d999ed7" containerName="ovn-controller" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488120 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a274a347-259a-4919-8326-8047df9b0de8" containerName="cinder-api-log" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488135 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6e1479-3b96-4c4b-be95-5834172d37ff" containerName="proxy-server" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488144 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d68e86-89f3-4dc6-bb42-7286c789fbba" containerName="nova-cell1-novncproxy-novncproxy" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488150 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerName="cinder-scheduler" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488161 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="780fa947-0a68-4231-b5b3-e0cad80204d2" containerName="openstack-network-exporter" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488170 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="39ef4372-3b20-44b5-b441-85f963e6a25a" containerName="galera" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488179 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" containerName="probe" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488188 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" containerName="ovsdbserver-nb" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488197 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" containerName="dnsmasq-dns" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488209 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f38ce569-0a5c-408d-9d44-bb953d38e24e" containerName="openstack-network-exporter" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.488219 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" containerName="openstack-network-exporter" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.492444 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ec6e1479-3b96-4c4b-be95-5834172d37ff" (UID: "ec6e1479-3b96-4c4b-be95-5834172d37ff"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.492762 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone2941-account-delete-kjhx5" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.493690 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone2941-account-delete-kjhx5"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.501932 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.507541 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a274a347-259a-4919-8326-8047df9b0de8" (UID: "a274a347-259a-4919-8326-8047df9b0de8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.507887 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-678769d845-d782m"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.508212 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-678769d845-d782m" podUID="80bdd62a-4024-4734-9ca0-a97f2bae29c3" containerName="keystone-api" containerID="cri-o://31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab" gracePeriod=30 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.516291 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-7hclp"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.531004 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpk6k\" (UniqueName: \"kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k\") pod \"keystone2941-account-delete-kjhx5\" (UID: \"df034a5b-6848-45e4-9e1b-8613a9ab590f\") " pod="openstack/keystone2941-account-delete-kjhx5" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.531104 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec6e1479-3b96-4c4b-be95-5834172d37ff-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.531118 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.527691 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "61d68e86-89f3-4dc6-bb42-7286c789fbba" (UID: "61d68e86-89f3-4dc6-bb42-7286c789fbba"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.531182 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.531220 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data podName:cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:27.53120725 +0000 UTC m=+1316.028525787 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data") pod "rabbitmq-cell1-server-0" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61") : configmap "rabbitmq-cell1-config-data" not found Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.547286 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-7hclp"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.554209 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-2941-account-create-7cmrj"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.566752 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-2941-account-create-7cmrj"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.573430 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-config-data" (OuterVolumeSpecName: "config-data") pod "61d68e86-89f3-4dc6-bb42-7286c789fbba" (UID: "61d68e86-89f3-4dc6-bb42-7286c789fbba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.579797 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39ef4372-3b20-44b5-b441-85f963e6a25a" (UID: "39ef4372-3b20-44b5-b441-85f963e6a25a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.593239 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone2941-account-delete-kjhx5"] Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.614782 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "39ef4372-3b20-44b5-b441-85f963e6a25a" (UID: "39ef4372-3b20-44b5-b441-85f963e6a25a"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.621001 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.626635 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a274a347-259a-4919-8326-8047df9b0de8" (UID: "a274a347-259a-4919-8326-8047df9b0de8"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.634031 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpk6k\" (UniqueName: \"kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k\") pod \"keystone2941-account-delete-kjhx5\" (UID: \"df034a5b-6848-45e4-9e1b-8613a9ab590f\") " pod="openstack/keystone2941-account-delete-kjhx5" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.634320 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.634337 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.634350 4757 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/39ef4372-3b20-44b5-b441-85f963e6a25a-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.634361 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.634372 4757 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.634383 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.642754 4757 projected.go:194] Error preparing data for projected volume kube-api-access-fpk6k for pod openstack/keystone2941-account-delete-kjhx5: failed to fetch token: serviceaccounts "galera-openstack" not found Oct 06 14:00:23 crc kubenswrapper[4757]: E1006 14:00:23.642854 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k podName:df034a5b-6848-45e4-9e1b-8613a9ab590f nodeName:}" failed. No retries permitted until 2025-10-06 14:00:24.142831948 +0000 UTC m=+1312.640150585 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-fpk6k" (UniqueName: "kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k") pod "keystone2941-account-delete-kjhx5" (UID: "df034a5b-6848-45e4-9e1b-8613a9ab590f") : failed to fetch token: serviceaccounts "galera-openstack" not found Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.662561 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data" (OuterVolumeSpecName: "config-data") pod "a274a347-259a-4919-8326-8047df9b0de8" (UID: "a274a347-259a-4919-8326-8047df9b0de8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.693048 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "61d68e86-89f3-4dc6-bb42-7286c789fbba" (UID: "61d68e86-89f3-4dc6-bb42-7286c789fbba"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.722270 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="1489eff7-41ff-420a-bce0-14247f8554ee" containerName="galera" containerID="cri-o://68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc" gracePeriod=30 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.729832 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a274a347-259a-4919-8326-8047df9b0de8" (UID: "a274a347-259a-4919-8326-8047df9b0de8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.735668 4757 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/61d68e86-89f3-4dc6-bb42-7286c789fbba-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.735695 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.735705 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a274a347-259a-4919-8326-8047df9b0de8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.832399 4757 generic.go:334] "Generic (PLEG): container finished" podID="c5b34617-b163-4b7a-9950-53f64a8cca2c" containerID="947f0a3b15f660d4a027bf43a3fba73c9916d632976aead40013fc2ec14335b9" exitCode=2 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.832465 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c5b34617-b163-4b7a-9950-53f64a8cca2c","Type":"ContainerDied","Data":"947f0a3b15f660d4a027bf43a3fba73c9916d632976aead40013fc2ec14335b9"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.843690 4757 generic.go:334] "Generic (PLEG): container finished" podID="542090f5-d2d8-4f78-b566-10e9885c341e" containerID="85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080" exitCode=0 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.843719 4757 generic.go:334] "Generic (PLEG): container finished" podID="542090f5-d2d8-4f78-b566-10e9885c341e" containerID="d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9" exitCode=2 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.843739 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerDied","Data":"85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.843768 4757 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerDied","Data":"d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.848928 4757 generic.go:334] "Generic (PLEG): container finished" podID="24ed1753-25de-4bde-8158-52cb7dd6a2f1" containerID="5d6cb1e28dd3b8e90a9d51e67db5c48ba545c42e1a58671c600c773ba7df9cc4" exitCode=0 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.848987 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapid1d1-account-delete-j7wzz" event={"ID":"24ed1753-25de-4bde-8158-52cb7dd6a2f1","Type":"ContainerDied","Data":"5d6cb1e28dd3b8e90a9d51e67db5c48ba545c42e1a58671c600c773ba7df9cc4"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.851307 4757 generic.go:334] "Generic (PLEG): container finished" podID="dffbfc0a-c4ec-41cc-873b-552bc6b7fa69" containerID="a56cbf1938af1879aefb87824c12e066c26f7d83d53119aca07be4518945693f" exitCode=0 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.851350 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron2bb5-account-delete-r8kww" event={"ID":"dffbfc0a-c4ec-41cc-873b-552bc6b7fa69","Type":"ContainerDied","Data":"a56cbf1938af1879aefb87824c12e066c26f7d83d53119aca07be4518945693f"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.856187 4757 generic.go:334] "Generic (PLEG): container finished" podID="a274a347-259a-4919-8326-8047df9b0de8" containerID="66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be" exitCode=0 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.856228 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a274a347-259a-4919-8326-8047df9b0de8","Type":"ContainerDied","Data":"66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.856246 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"a274a347-259a-4919-8326-8047df9b0de8","Type":"ContainerDied","Data":"b3b1c2ceaa596a855a2c3c2e0bd1b47ddd880cfeea2a9ddb6672bde046d8748b"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.856263 4757 scope.go:117] "RemoveContainer" containerID="66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.856343 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.873457 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell00cee-account-delete-2rcjp" event={"ID":"7904f7d1-2332-4402-bd0b-4a40f5be43f9","Type":"ContainerStarted","Data":"df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.877064 4757 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/novacell00cee-account-delete-2rcjp" secret="" err="secret \"galera-openstack-dockercfg-7gzsb\" not found" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.886234 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican39b0-account-delete-gptgt" event={"ID":"76f4e4cc-eccd-4c44-a39a-a75c06383d92","Type":"ContainerStarted","Data":"e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.886371 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican39b0-account-delete-gptgt" podUID="76f4e4cc-eccd-4c44-a39a-a75c06383d92" containerName="mariadb-account-delete" containerID="cri-o://e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed" gracePeriod=30 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.893230 4757 generic.go:334] "Generic (PLEG): container finished" podID="a21e8c5a-5819-4e8c-9b20-5353625fc36b" containerID="ff662bc600af80633dbf9ed1780d1a31e3db1b29ac07bbca32eb0ee8886ef72f" exitCode=0 Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.893293 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"a21e8c5a-5819-4e8c-9b20-5353625fc36b","Type":"ContainerDied","Data":"ff662bc600af80633dbf9ed1780d1a31e3db1b29ac07bbca32eb0ee8886ef72f"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.895685 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.897406 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"39ef4372-3b20-44b5-b441-85f963e6a25a","Type":"ContainerDied","Data":"ca577a70da94d941fb8d276731361db62ddf49abe4ce555e0dc37e838eaa242e"} Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.897513 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.897533 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-7c7848899c-8bm7g" Oct 06 14:00:23 crc kubenswrapper[4757]: I1006 14:00:23.906076 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican39b0-account-delete-gptgt" podStartSLOduration=4.90606281 podStartE2EDuration="4.90606281s" podCreationTimestamp="2025-10-06 14:00:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 14:00:23.9019813 +0000 UTC m=+1312.399299837" watchObservedRunningTime="2025-10-06 14:00:23.90606281 +0000 UTC m=+1312.403381347" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.044850 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": read tcp 10.217.0.2:45644->10.217.0.204:8775: read: connection reset by peer" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.046366 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": read tcp 10.217.0.2:45648->10.217.0.204:8775: read: connection reset by peer" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.071777 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 06 14:00:24 crc kubenswrapper[4757]: E1006 14:00:24.072352 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-fpk6k], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone2941-account-delete-kjhx5" podUID="df034a5b-6848-45e4-9e1b-8613a9ab590f" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.078072 4757 scope.go:117] "RemoveContainer" containerID="56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.102037 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell00cee-account-delete-2rcjp" podStartSLOduration=5.102016383 podStartE2EDuration="5.102016383s" podCreationTimestamp="2025-10-06 14:00:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 14:00:23.93236484 +0000 UTC m=+1312.429683397" watchObservedRunningTime="2025-10-06 14:00:24.102016383 +0000 UTC m=+1312.599334930" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.141798 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.142315 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lpcc\" (UniqueName: \"kubernetes.io/projected/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-api-access-8lpcc\") pod \"c5b34617-b163-4b7a-9950-53f64a8cca2c\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.144035 4757 scope.go:117] "RemoveContainer" containerID="66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.144160 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" 
podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.199:3000/\": dial tcp 10.217.0.199:3000: connect: connection refused" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.145434 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-certs\") pod \"c5b34617-b163-4b7a-9950-53f64a8cca2c\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.145675 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-config\") pod \"c5b34617-b163-4b7a-9950-53f64a8cca2c\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.145839 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-combined-ca-bundle\") pod \"c5b34617-b163-4b7a-9950-53f64a8cca2c\" (UID: \"c5b34617-b163-4b7a-9950-53f64a8cca2c\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.146486 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpk6k\" (UniqueName: \"kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k\") pod \"keystone2941-account-delete-kjhx5\" (UID: \"df034a5b-6848-45e4-9e1b-8613a9ab590f\") " pod="openstack/keystone2941-account-delete-kjhx5" Oct 06 14:00:24 crc kubenswrapper[4757]: E1006 14:00:24.145844 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be\": container with ID starting with 66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be not found: ID does not exist" containerID="66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.147444 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be"} err="failed to get container status \"66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be\": rpc error: code = NotFound desc = could not find container \"66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be\": container with ID starting with 66adf9671d94409c8d526ef54e28136551e0537951eeabe62b8c147ef1d852be not found: ID does not exist" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.147597 4757 scope.go:117] "RemoveContainer" containerID="56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.145755 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-api-access-8lpcc" (OuterVolumeSpecName: "kube-api-access-8lpcc") pod "c5b34617-b163-4b7a-9950-53f64a8cca2c" (UID: "c5b34617-b163-4b7a-9950-53f64a8cca2c"). InnerVolumeSpecName "kube-api-access-8lpcc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: E1006 14:00:24.155049 4757 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod542090f5_d2d8_4f78_b566_10e9885c341e.slice/crio-conmon-85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24ed1753_25de_4bde_8158_52cb7dd6a2f1.slice/crio-conmon-5d6cb1e28dd3b8e90a9d51e67db5c48ba545c42e1a58671c600c773ba7df9cc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddffbfc0a_c4ec_41cc_873b_552bc6b7fa69.slice/crio-a56cbf1938af1879aefb87824c12e066c26f7d83d53119aca07be4518945693f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod542090f5_d2d8_4f78_b566_10e9885c341e.slice/crio-85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec6e1479_3b96_4c4b_be95_5834172d37ff.slice/crio-e883a3dae22fdf60514c219a9283e92bb5abf320e9d17268731d006765c50594\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec6e1479_3b96_4c4b_be95_5834172d37ff.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod542090f5_d2d8_4f78_b566_10e9885c341e.slice/crio-conmon-d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5b34617_b163_4b7a_9950_53f64a8cca2c.slice/crio-947f0a3b15f660d4a027bf43a3fba73c9916d632976aead40013fc2ec14335b9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod542090f5_d2d8_4f78_b566_10e9885c341e.slice/crio-d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9.scope\": RecentStats: unable to find data in memory cache]" Oct 06 14:00:24 crc kubenswrapper[4757]: E1006 14:00:24.156012 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53\": container with ID starting with 56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53 not found: ID does not exist" containerID="56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.156047 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53"} err="failed to get container status \"56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53\": rpc error: code = NotFound desc = could not find container \"56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53\": container with ID starting with 56539da0d904647a7143dee780f395e8c568df58a690438ce63ea5e4176ade53 not found: ID does not exist" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.156292 4757 scope.go:117] "RemoveContainer" 
containerID="c6ba27d47c99f13d015277533c3fba4500d832d05fa4632f9ba24c71c9d9ec8e" Oct 06 14:00:24 crc kubenswrapper[4757]: E1006 14:00:24.161692 4757 projected.go:194] Error preparing data for projected volume kube-api-access-fpk6k for pod openstack/keystone2941-account-delete-kjhx5: failed to fetch token: serviceaccounts "galera-openstack" not found Oct 06 14:00:24 crc kubenswrapper[4757]: E1006 14:00:24.162147 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k podName:df034a5b-6848-45e4-9e1b-8613a9ab590f nodeName:}" failed. No retries permitted until 2025-10-06 14:00:25.161735511 +0000 UTC m=+1313.659054048 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-fpk6k" (UniqueName: "kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k") pod "keystone2941-account-delete-kjhx5" (UID: "df034a5b-6848-45e4-9e1b-8613a9ab590f") : failed to fetch token: serviceaccounts "galera-openstack" not found Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.173916 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.177518 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5b34617-b163-4b7a-9950-53f64a8cca2c" (UID: "c5b34617-b163-4b7a-9950-53f64a8cca2c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.200946 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="022bc107-84fd-4650-a5a3-533d42aef0f0" path="/var/lib/kubelet/pods/022bc107-84fd-4650-a5a3-533d42aef0f0/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.201569 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0efda247-fa18-49db-a37d-1dd28d999ed7" path="/var/lib/kubelet/pods/0efda247-fa18-49db-a37d-1dd28d999ed7/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.202606 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47e3132d-b23b-47f2-b26e-5511df70deec" path="/var/lib/kubelet/pods/47e3132d-b23b-47f2-b26e-5511df70deec/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.203189 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "c5b34617-b163-4b7a-9950-53f64a8cca2c" (UID: "c5b34617-b163-4b7a-9950-53f64a8cca2c"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.203759 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a2f1fd8-9191-40cd-9edb-6a681214aaa9" path="/var/lib/kubelet/pods/5a2f1fd8-9191-40cd-9edb-6a681214aaa9/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.204549 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76cdc407-bb9a-44dc-82eb-c9b083395c8d" path="/var/lib/kubelet/pods/76cdc407-bb9a-44dc-82eb-c9b083395c8d/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.205268 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="780fa947-0a68-4231-b5b3-e0cad80204d2" path="/var/lib/kubelet/pods/780fa947-0a68-4231-b5b3-e0cad80204d2/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.206381 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a274a347-259a-4919-8326-8047df9b0de8" path="/var/lib/kubelet/pods/a274a347-259a-4919-8326-8047df9b0de8/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.207219 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0f6378b-8aab-41e0-a041-77d6eaa303b7" path="/var/lib/kubelet/pods/e0f6378b-8aab-41e0-a041-77d6eaa303b7/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.208071 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "c5b34617-b163-4b7a-9950-53f64a8cca2c" (UID: "c5b34617-b163-4b7a-9950-53f64a8cca2c"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.208201 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e243d6cc-b188-4dbb-a95e-21b9e6d52eeb" path="/var/lib/kubelet/pods/e243d6cc-b188-4dbb-a95e-21b9e6d52eeb/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.208771 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb" path="/var/lib/kubelet/pods/f2520c55-6861-4a9a-aa4e-8c1b1ace3dfb/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.209405 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f38ce569-0a5c-408d-9d44-bb953d38e24e" path="/var/lib/kubelet/pods/f38ce569-0a5c-408d-9d44-bb953d38e24e/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.210771 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8e78f45-d6b0-44d0-b84f-30ac3538ec3c" path="/var/lib/kubelet/pods/f8e78f45-d6b0-44d0-b84f-30ac3538ec3c/volumes" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.211760 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.211794 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.211812 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-bpnp8"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.211826 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-bpnp8"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.220915 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/neutron2bb5-account-delete-r8kww"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.233274 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.233990 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.241867 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-2bb5-account-create-8m8c6"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.253391 4757 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.253429 4757 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.253443 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5b34617-b163-4b7a-9950-53f64a8cca2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.253458 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lpcc\" (UniqueName: \"kubernetes.io/projected/c5b34617-b163-4b7a-9950-53f64a8cca2c-kube-api-access-8lpcc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.265078 4757 scope.go:117] "RemoveContainer" containerID="8b51647808cd10dd44bf57888634b8904b7ecd8ed93207d6c169f832ac168892" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.282268 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.291636 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-2bb5-account-create-8m8c6"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.304160 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-7c7848899c-8bm7g"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.308242 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-7c7848899c-8bm7g"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.319959 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-8f2b9"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.329151 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-8f2b9"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.338578 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-d1d1-account-create-pfx4p"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.355054 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmd5w\" (UniqueName: \"kubernetes.io/projected/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kube-api-access-nmd5w\") pod \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.355328 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-combined-ca-bundle\") pod \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.355377 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-config-data\") pod \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.355451 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-memcached-tls-certs\") pod \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.355504 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kolla-config\") pod \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.356436 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "a21e8c5a-5819-4e8c-9b20-5353625fc36b" (UID: "a21e8c5a-5819-4e8c-9b20-5353625fc36b"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.356474 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-config-data" (OuterVolumeSpecName: "config-data") pod "a21e8c5a-5819-4e8c-9b20-5353625fc36b" (UID: "a21e8c5a-5819-4e8c-9b20-5353625fc36b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.368398 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-d1d1-account-create-pfx4p"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.385341 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kube-api-access-nmd5w" (OuterVolumeSpecName: "kube-api-access-nmd5w") pod "a21e8c5a-5819-4e8c-9b20-5353625fc36b" (UID: "a21e8c5a-5819-4e8c-9b20-5353625fc36b"). InnerVolumeSpecName "kube-api-access-nmd5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.397212 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapid1d1-account-delete-j7wzz"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.408237 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-6hgd2"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.410228 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-6hgd2"] Oct 06 14:00:24 crc kubenswrapper[4757]: E1006 14:00:24.425366 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-combined-ca-bundle podName:a21e8c5a-5819-4e8c-9b20-5353625fc36b nodeName:}" failed. 
No retries permitted until 2025-10-06 14:00:24.925338096 +0000 UTC m=+1313.422656633 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-combined-ca-bundle") pod "a21e8c5a-5819-4e8c-9b20-5353625fc36b" (UID: "a21e8c5a-5819-4e8c-9b20-5353625fc36b") : error deleting /var/lib/kubelet/pods/a21e8c5a-5819-4e8c-9b20-5353625fc36b/volume-subpaths: remove /var/lib/kubelet/pods/a21e8c5a-5819-4e8c-9b20-5353625fc36b/volume-subpaths: no such file or directory Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.446172 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-0cee-account-create-hlzct"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.446247 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-0cee-account-create-hlzct"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.447930 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "a21e8c5a-5819-4e8c-9b20-5353625fc36b" (UID: "a21e8c5a-5819-4e8c-9b20-5353625fc36b"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.452670 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell00cee-account-delete-2rcjp"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.458498 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmd5w\" (UniqueName: \"kubernetes.io/projected/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kube-api-access-nmd5w\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.458708 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.458794 4757 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.458871 4757 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a21e8c5a-5819-4e8c-9b20-5353625fc36b-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.688283 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.764323 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-config-data\") pod \"562413a3-660e-4ed9-92d6-23cb7d84b936\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.764427 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjgg2\" (UniqueName: \"kubernetes.io/projected/562413a3-660e-4ed9-92d6-23cb7d84b936-kube-api-access-pjgg2\") pod \"562413a3-660e-4ed9-92d6-23cb7d84b936\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.764447 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/562413a3-660e-4ed9-92d6-23cb7d84b936-logs\") pod \"562413a3-660e-4ed9-92d6-23cb7d84b936\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.764465 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-combined-ca-bundle\") pod \"562413a3-660e-4ed9-92d6-23cb7d84b936\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.764569 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-nova-metadata-tls-certs\") pod \"562413a3-660e-4ed9-92d6-23cb7d84b936\" (UID: \"562413a3-660e-4ed9-92d6-23cb7d84b936\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.768682 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/562413a3-660e-4ed9-92d6-23cb7d84b936-logs" (OuterVolumeSpecName: "logs") pod "562413a3-660e-4ed9-92d6-23cb7d84b936" (UID: "562413a3-660e-4ed9-92d6-23cb7d84b936"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.769446 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/562413a3-660e-4ed9-92d6-23cb7d84b936-kube-api-access-pjgg2" (OuterVolumeSpecName: "kube-api-access-pjgg2") pod "562413a3-660e-4ed9-92d6-23cb7d84b936" (UID: "562413a3-660e-4ed9-92d6-23cb7d84b936"). InnerVolumeSpecName "kube-api-access-pjgg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.798652 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-config-data" (OuterVolumeSpecName: "config-data") pod "562413a3-660e-4ed9-92d6-23cb7d84b936" (UID: "562413a3-660e-4ed9-92d6-23cb7d84b936"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.811499 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "562413a3-660e-4ed9-92d6-23cb7d84b936" (UID: "562413a3-660e-4ed9-92d6-23cb7d84b936"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.834271 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "562413a3-660e-4ed9-92d6-23cb7d84b936" (UID: "562413a3-660e-4ed9-92d6-23cb7d84b936"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.867167 4757 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.867415 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.867424 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjgg2\" (UniqueName: \"kubernetes.io/projected/562413a3-660e-4ed9-92d6-23cb7d84b936-kube-api-access-pjgg2\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.867432 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/562413a3-660e-4ed9-92d6-23cb7d84b936-logs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.867441 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562413a3-660e-4ed9-92d6-23cb7d84b936-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.907358 4757 generic.go:334] "Generic (PLEG): container finished" podID="afc51d15-69dd-4900-886c-29a4f372df24" containerID="501ec0020527b58bea34dc6aa1f5770171c8c96af98e8cc181a6f0a30690207d" exitCode=0 Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.907442 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ff5468974-c5722" event={"ID":"afc51d15-69dd-4900-886c-29a4f372df24","Type":"ContainerDied","Data":"501ec0020527b58bea34dc6aa1f5770171c8c96af98e8cc181a6f0a30690207d"} Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.908808 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"a21e8c5a-5819-4e8c-9b20-5353625fc36b","Type":"ContainerDied","Data":"d97690835875d33ed098d272fa083111b478485f4b1e3e9f63ee3cc3077f6a86"} Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.908853 4757 scope.go:117] "RemoveContainer" containerID="ff662bc600af80633dbf9ed1780d1a31e3db1b29ac07bbca32eb0ee8886ef72f" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.908956 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.918732 4757 generic.go:334] "Generic (PLEG): container finished" podID="542090f5-d2d8-4f78-b566-10e9885c341e" containerID="d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb" exitCode=0 Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.918800 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerDied","Data":"d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb"} Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.922196 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c5b34617-b163-4b7a-9950-53f64a8cca2c","Type":"ContainerDied","Data":"abc59daaf7bc965011509a5abe63acfe29e77cd751ce4c4da520fb488e2e5282"} Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.922266 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.932783 4757 generic.go:334] "Generic (PLEG): container finished" podID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerID="627201afc996e1b5ebc9ec2b49081817f4957580a028ef0ede8ac13918294d2e" exitCode=0 Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.932860 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78a4414b-7eec-457f-b08c-aeb719ffc320","Type":"ContainerDied","Data":"627201afc996e1b5ebc9ec2b49081817f4957580a028ef0ede8ac13918294d2e"} Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.949231 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.953278 4757 generic.go:334] "Generic (PLEG): container finished" podID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerID="a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214" exitCode=0 Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.953382 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"562413a3-660e-4ed9-92d6-23cb7d84b936","Type":"ContainerDied","Data":"a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214"} Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.953409 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"562413a3-660e-4ed9-92d6-23cb7d84b936","Type":"ContainerDied","Data":"b58dcfbdf92a639666169c93fd3e7256db5479f38d5082dfae579dd37e27cd69"} Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.953490 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.955764 4757 scope.go:117] "RemoveContainer" containerID="947f0a3b15f660d4a027bf43a3fba73c9916d632976aead40013fc2ec14335b9" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.975405 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.976005 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-combined-ca-bundle\") pod \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\" (UID: \"a21e8c5a-5819-4e8c-9b20-5353625fc36b\") " Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.981332 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a21e8c5a-5819-4e8c-9b20-5353625fc36b" (UID: "a21e8c5a-5819-4e8c-9b20-5353625fc36b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.983052 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ff5468974-c5722" Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.983799 4757 generic.go:334] "Generic (PLEG): container finished" podID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerID="103f1cb6b966566428982058c2497d82d3412e2a56a570fe97a8556bbaf90b4e" exitCode=0 Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.983854 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" event={"ID":"985540de-3212-41f4-a3a6-180ff5c4eda2","Type":"ContainerDied","Data":"103f1cb6b966566428982058c2497d82d3412e2a56a570fe97a8556bbaf90b4e"} Oct 06 14:00:24 crc kubenswrapper[4757]: I1006 14:00:24.994274 4757 scope.go:117] "RemoveContainer" containerID="a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.016021 4757 generic.go:334] "Generic (PLEG): container finished" podID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerID="fe4d0be031635e1711fdd14e7751dbf7c8f1ff14ed259fe9f2d2457646f22b8f" exitCode=0 Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.016548 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4da5a9db-df84-4b71-b566-7c723fd7eb65","Type":"ContainerDied","Data":"fe4d0be031635e1711fdd14e7751dbf7c8f1ff14ed259fe9f2d2457646f22b8f"} Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.016974 4757 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell00cee-account-delete-2rcjp" secret="" err="secret \"galera-openstack-dockercfg-7gzsb\" not found" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.017489 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone2941-account-delete-kjhx5" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.039073 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.044224 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone2941-account-delete-kjhx5" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.053585 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.061314 4757 scope.go:117] "RemoveContainer" containerID="f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.078003 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-public-tls-certs\") pod \"afc51d15-69dd-4900-886c-29a4f372df24\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.078064 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-internal-tls-certs\") pod \"afc51d15-69dd-4900-886c-29a4f372df24\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.078120 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-combined-ca-bundle\") pod \"afc51d15-69dd-4900-886c-29a4f372df24\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.078141 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcj28\" (UniqueName: \"kubernetes.io/projected/afc51d15-69dd-4900-886c-29a4f372df24-kube-api-access-tcj28\") pod \"afc51d15-69dd-4900-886c-29a4f372df24\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.078193 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-config-data\") pod \"afc51d15-69dd-4900-886c-29a4f372df24\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.078283 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-scripts\") pod \"afc51d15-69dd-4900-886c-29a4f372df24\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.078303 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc51d15-69dd-4900-886c-29a4f372df24-logs\") pod \"afc51d15-69dd-4900-886c-29a4f372df24\" (UID: \"afc51d15-69dd-4900-886c-29a4f372df24\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.078621 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a21e8c5a-5819-4e8c-9b20-5353625fc36b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.079755 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5c78d5595b-89h8z" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:36678->10.217.0.157:9311: read: connection reset by peer" Oct 06 14:00:25 crc 
kubenswrapper[4757]: I1006 14:00:25.080017 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5c78d5595b-89h8z" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:36672->10.217.0.157:9311: read: connection reset by peer" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.084006 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afc51d15-69dd-4900-886c-29a4f372df24-kube-api-access-tcj28" (OuterVolumeSpecName: "kube-api-access-tcj28") pod "afc51d15-69dd-4900-886c-29a4f372df24" (UID: "afc51d15-69dd-4900-886c-29a4f372df24"). InnerVolumeSpecName "kube-api-access-tcj28". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.099638 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afc51d15-69dd-4900-886c-29a4f372df24-logs" (OuterVolumeSpecName: "logs") pod "afc51d15-69dd-4900-886c-29a4f372df24" (UID: "afc51d15-69dd-4900-886c-29a4f372df24"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.103920 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-scripts" (OuterVolumeSpecName: "scripts") pod "afc51d15-69dd-4900-886c-29a4f372df24" (UID: "afc51d15-69dd-4900-886c-29a4f372df24"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.107359 4757 scope.go:117] "RemoveContainer" containerID="a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214" Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.107804 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214\": container with ID starting with a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214 not found: ID does not exist" containerID="a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.107842 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214"} err="failed to get container status \"a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214\": rpc error: code = NotFound desc = could not find container \"a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214\": container with ID starting with a330434e1329166fdf158a4f47bc672f8e6b950f06a5a900972a0d32c53e1214 not found: ID does not exist" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.107864 4757 scope.go:117] "RemoveContainer" containerID="f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9" Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.108423 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9\": container with ID starting with f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9 not found: ID does not exist" containerID="f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9" Oct 
06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.108462 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9"} err="failed to get container status \"f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9\": rpc error: code = NotFound desc = could not find container \"f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9\": container with ID starting with f0fd96582d94c846019bab0c120449cbd9e2f834e146e3c3bd6f8f0ec4a69ae9 not found: ID does not exist" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.133902 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.178839 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-config-data" (OuterVolumeSpecName: "config-data") pod "afc51d15-69dd-4900-886c-29a4f372df24" (UID: "afc51d15-69dd-4900-886c-29a4f372df24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.179458 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-combined-ca-bundle\") pod \"4da5a9db-df84-4b71-b566-7c723fd7eb65\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.179525 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-config-data\") pod \"4da5a9db-df84-4b71-b566-7c723fd7eb65\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.179552 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-public-tls-certs\") pod \"4da5a9db-df84-4b71-b566-7c723fd7eb65\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.179596 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-logs\") pod \"4da5a9db-df84-4b71-b566-7c723fd7eb65\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.179614 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"4da5a9db-df84-4b71-b566-7c723fd7eb65\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.179646 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4vb9\" (UniqueName: \"kubernetes.io/projected/4da5a9db-df84-4b71-b566-7c723fd7eb65-kube-api-access-z4vb9\") pod \"4da5a9db-df84-4b71-b566-7c723fd7eb65\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.179715 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-httpd-run\") pod 
\"4da5a9db-df84-4b71-b566-7c723fd7eb65\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.179743 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-scripts\") pod \"4da5a9db-df84-4b71-b566-7c723fd7eb65\" (UID: \"4da5a9db-df84-4b71-b566-7c723fd7eb65\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.180015 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpk6k\" (UniqueName: \"kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k\") pod \"keystone2941-account-delete-kjhx5\" (UID: \"df034a5b-6848-45e4-9e1b-8613a9ab590f\") " pod="openstack/keystone2941-account-delete-kjhx5" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.180165 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.180179 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc51d15-69dd-4900-886c-29a4f372df24-logs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.180188 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcj28\" (UniqueName: \"kubernetes.io/projected/afc51d15-69dd-4900-886c-29a4f372df24-kube-api-access-tcj28\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.180196 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.194477 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "4da5a9db-df84-4b71-b566-7c723fd7eb65" (UID: "4da5a9db-df84-4b71-b566-7c723fd7eb65"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.198416 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4da5a9db-df84-4b71-b566-7c723fd7eb65-kube-api-access-z4vb9" (OuterVolumeSpecName: "kube-api-access-z4vb9") pod "4da5a9db-df84-4b71-b566-7c723fd7eb65" (UID: "4da5a9db-df84-4b71-b566-7c723fd7eb65"). InnerVolumeSpecName "kube-api-access-z4vb9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.198575 4757 projected.go:194] Error preparing data for projected volume kube-api-access-fpk6k for pod openstack/keystone2941-account-delete-kjhx5: failed to fetch token: serviceaccounts "galera-openstack" not found Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.202629 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k podName:df034a5b-6848-45e4-9e1b-8613a9ab590f nodeName:}" failed. No retries permitted until 2025-10-06 14:00:27.202601187 +0000 UTC m=+1315.699919724 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-fpk6k" (UniqueName: "kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k") pod "keystone2941-account-delete-kjhx5" (UID: "df034a5b-6848-45e4-9e1b-8613a9ab590f") : failed to fetch token: serviceaccounts "galera-openstack" not found Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.203007 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4da5a9db-df84-4b71-b566-7c723fd7eb65" (UID: "4da5a9db-df84-4b71-b566-7c723fd7eb65"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.207205 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-scripts" (OuterVolumeSpecName: "scripts") pod "4da5a9db-df84-4b71-b566-7c723fd7eb65" (UID: "4da5a9db-df84-4b71-b566-7c723fd7eb65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.209761 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-logs" (OuterVolumeSpecName: "logs") pod "4da5a9db-df84-4b71-b566-7c723fd7eb65" (UID: "4da5a9db-df84-4b71-b566-7c723fd7eb65"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.223851 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "afc51d15-69dd-4900-886c-29a4f372df24" (UID: "afc51d15-69dd-4900-886c-29a4f372df24"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.246765 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4da5a9db-df84-4b71-b566-7c723fd7eb65" (UID: "4da5a9db-df84-4b71-b566-7c723fd7eb65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.248980 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.286668 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzswt\" (UniqueName: \"kubernetes.io/projected/78a4414b-7eec-457f-b08c-aeb719ffc320-kube-api-access-xzswt\") pod \"78a4414b-7eec-457f-b08c-aeb719ffc320\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.286749 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"78a4414b-7eec-457f-b08c-aeb719ffc320\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.286821 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-scripts\") pod \"78a4414b-7eec-457f-b08c-aeb719ffc320\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.298171 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78a4414b-7eec-457f-b08c-aeb719ffc320-kube-api-access-xzswt" (OuterVolumeSpecName: "kube-api-access-xzswt") pod "78a4414b-7eec-457f-b08c-aeb719ffc320" (UID: "78a4414b-7eec-457f-b08c-aeb719ffc320"). InnerVolumeSpecName "kube-api-access-xzswt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.305283 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-scripts" (OuterVolumeSpecName: "scripts") pod "78a4414b-7eec-457f-b08c-aeb719ffc320" (UID: "78a4414b-7eec-457f-b08c-aeb719ffc320"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.317471 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-internal-tls-certs\") pod \"78a4414b-7eec-457f-b08c-aeb719ffc320\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.317584 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-config-data\") pod \"78a4414b-7eec-457f-b08c-aeb719ffc320\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.317641 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-combined-ca-bundle\") pod \"78a4414b-7eec-457f-b08c-aeb719ffc320\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.317719 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-logs\") pod \"78a4414b-7eec-457f-b08c-aeb719ffc320\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.317768 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-httpd-run\") pod \"78a4414b-7eec-457f-b08c-aeb719ffc320\" (UID: \"78a4414b-7eec-457f-b08c-aeb719ffc320\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.318919 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.318947 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzswt\" (UniqueName: \"kubernetes.io/projected/78a4414b-7eec-457f-b08c-aeb719ffc320-kube-api-access-xzswt\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.318980 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.318992 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-logs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.319009 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4vb9\" (UniqueName: \"kubernetes.io/projected/4da5a9db-df84-4b71-b566-7c723fd7eb65-kube-api-access-z4vb9\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.319020 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.319031 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.319041 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4da5a9db-df84-4b71-b566-7c723fd7eb65-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.319056 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.319914 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-logs" (OuterVolumeSpecName: "logs") pod "78a4414b-7eec-457f-b08c-aeb719ffc320" (UID: "78a4414b-7eec-457f-b08c-aeb719ffc320"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.320662 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "78a4414b-7eec-457f-b08c-aeb719ffc320" (UID: "78a4414b-7eec-457f-b08c-aeb719ffc320"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.322591 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "afc51d15-69dd-4900-886c-29a4f372df24" (UID: "afc51d15-69dd-4900-886c-29a4f372df24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.332014 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "78a4414b-7eec-457f-b08c-aeb719ffc320" (UID: "78a4414b-7eec-457f-b08c-aeb719ffc320"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.339209 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4da5a9db-df84-4b71-b566-7c723fd7eb65" (UID: "4da5a9db-df84-4b71-b566-7c723fd7eb65"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.346525 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.356691 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.358703 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-config-data" (OuterVolumeSpecName: "config-data") pod "4da5a9db-df84-4b71-b566-7c723fd7eb65" (UID: "4da5a9db-df84-4b71-b566-7c723fd7eb65"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.403533 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.408253 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.410433 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.410470 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="ovn-northd" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.422081 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-logs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.422224 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78a4414b-7eec-457f-b08c-aeb719ffc320-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.422234 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.422243 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4da5a9db-df84-4b71-b566-7c723fd7eb65-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.422297 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.422307 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.434266 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78a4414b-7eec-457f-b08c-aeb719ffc320" (UID: "78a4414b-7eec-457f-b08c-aeb719ffc320"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.453835 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "78a4414b-7eec-457f-b08c-aeb719ffc320" (UID: "78a4414b-7eec-457f-b08c-aeb719ffc320"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.463992 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.478006 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.499385 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "afc51d15-69dd-4900-886c-29a4f372df24" (UID: "afc51d15-69dd-4900-886c-29a4f372df24"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.519993 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-config-data" (OuterVolumeSpecName: "config-data") pod "78a4414b-7eec-457f-b08c-aeb719ffc320" (UID: "78a4414b-7eec-457f-b08c-aeb719ffc320"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.523878 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.523901 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.523910 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.523918 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc51d15-69dd-4900-886c-29a4f372df24-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.523926 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.523934 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78a4414b-7eec-457f-b08c-aeb719ffc320-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.750862 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4 is running failed: container process not found" containerID="ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.751358 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4 is running failed: container process not found" containerID="ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.751757 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4 is running failed: container process not found" containerID="ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 06 14:00:25 crc kubenswrapper[4757]: E1006 14:00:25.751802 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="71099fbe-349d-4a04-857c-41f270ec89af" containerName="nova-cell1-conductor-conductor" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.849875 4757 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.863875 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapid1d1-account-delete-j7wzz" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.867689 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.879501 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron2bb5-account-delete-r8kww" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.882963 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.905914 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.915940 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930630 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqlm9\" (UniqueName: \"kubernetes.io/projected/542090f5-d2d8-4f78-b566-10e9885c341e-kube-api-access-fqlm9\") pod \"542090f5-d2d8-4f78-b566-10e9885c341e\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930669 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-log-httpd\") pod \"542090f5-d2d8-4f78-b566-10e9885c341e\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930687 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-scripts\") pod \"542090f5-d2d8-4f78-b566-10e9885c341e\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930715 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdhxl\" (UniqueName: \"kubernetes.io/projected/24ed1753-25de-4bde-8158-52cb7dd6a2f1-kube-api-access-gdhxl\") pod \"24ed1753-25de-4bde-8158-52cb7dd6a2f1\" (UID: \"24ed1753-25de-4bde-8158-52cb7dd6a2f1\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930749 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-run-httpd\") pod \"542090f5-d2d8-4f78-b566-10e9885c341e\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930780 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data\") pod \"985540de-3212-41f4-a3a6-180ff5c4eda2\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930808 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-ceilometer-tls-certs\") pod \"542090f5-d2d8-4f78-b566-10e9885c341e\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930842 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5zw9\" (UniqueName: \"kubernetes.io/projected/985540de-3212-41f4-a3a6-180ff5c4eda2-kube-api-access-l5zw9\") pod \"985540de-3212-41f4-a3a6-180ff5c4eda2\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930864 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-combined-ca-bundle\") pod \"542090f5-d2d8-4f78-b566-10e9885c341e\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930887 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-config-data\") pod \"542090f5-d2d8-4f78-b566-10e9885c341e\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930930 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-sg-core-conf-yaml\") pod \"542090f5-d2d8-4f78-b566-10e9885c341e\" (UID: \"542090f5-d2d8-4f78-b566-10e9885c341e\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.930969 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985540de-3212-41f4-a3a6-180ff5c4eda2-logs\") pod \"985540de-3212-41f4-a3a6-180ff5c4eda2\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.931015 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data-custom\") pod \"985540de-3212-41f4-a3a6-180ff5c4eda2\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.931040 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-combined-ca-bundle\") pod \"985540de-3212-41f4-a3a6-180ff5c4eda2\" (UID: \"985540de-3212-41f4-a3a6-180ff5c4eda2\") " Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.937126 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "542090f5-d2d8-4f78-b566-10e9885c341e" (UID: "542090f5-d2d8-4f78-b566-10e9885c341e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.937891 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/985540de-3212-41f4-a3a6-180ff5c4eda2-logs" (OuterVolumeSpecName: "logs") pod "985540de-3212-41f4-a3a6-180ff5c4eda2" (UID: "985540de-3212-41f4-a3a6-180ff5c4eda2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.938874 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "542090f5-d2d8-4f78-b566-10e9885c341e" (UID: "542090f5-d2d8-4f78-b566-10e9885c341e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.942244 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/542090f5-d2d8-4f78-b566-10e9885c341e-kube-api-access-fqlm9" (OuterVolumeSpecName: "kube-api-access-fqlm9") pod "542090f5-d2d8-4f78-b566-10e9885c341e" (UID: "542090f5-d2d8-4f78-b566-10e9885c341e"). InnerVolumeSpecName "kube-api-access-fqlm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.944201 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "985540de-3212-41f4-a3a6-180ff5c4eda2" (UID: "985540de-3212-41f4-a3a6-180ff5c4eda2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.949444 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/985540de-3212-41f4-a3a6-180ff5c4eda2-kube-api-access-l5zw9" (OuterVolumeSpecName: "kube-api-access-l5zw9") pod "985540de-3212-41f4-a3a6-180ff5c4eda2" (UID: "985540de-3212-41f4-a3a6-180ff5c4eda2"). InnerVolumeSpecName "kube-api-access-l5zw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.950223 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-scripts" (OuterVolumeSpecName: "scripts") pod "542090f5-d2d8-4f78-b566-10e9885c341e" (UID: "542090f5-d2d8-4f78-b566-10e9885c341e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.952296 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.953032 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24ed1753-25de-4bde-8158-52cb7dd6a2f1-kube-api-access-gdhxl" (OuterVolumeSpecName: "kube-api-access-gdhxl") pod "24ed1753-25de-4bde-8158-52cb7dd6a2f1" (UID: "24ed1753-25de-4bde-8158-52cb7dd6a2f1"). InnerVolumeSpecName "kube-api-access-gdhxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:25 crc kubenswrapper[4757]: I1006 14:00:25.997571 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data" (OuterVolumeSpecName: "config-data") pod "985540de-3212-41f4-a3a6-180ff5c4eda2" (UID: "985540de-3212-41f4-a3a6-180ff5c4eda2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.017984 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "985540de-3212-41f4-a3a6-180ff5c4eda2" (UID: "985540de-3212-41f4-a3a6-180ff5c4eda2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.029698 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4da5a9db-df84-4b71-b566-7c723fd7eb65","Type":"ContainerDied","Data":"261f7e3c6d4c3ed7d9173122ab4fee7f0a1c905bbd1f110cf3a80f6ff3e511fb"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.029746 4757 scope.go:117] "RemoveContainer" containerID="fe4d0be031635e1711fdd14e7751dbf7c8f1ff14ed259fe9f2d2457646f22b8f" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.029861 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.032503 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-combined-ca-bundle\") pod \"ac42434c-8367-4cf2-9134-2d85444f90f4\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.032549 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-default\") pod \"1489eff7-41ff-420a-bce0-14247f8554ee\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.032617 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdthx\" (UniqueName: \"kubernetes.io/projected/71099fbe-349d-4a04-857c-41f270ec89af-kube-api-access-xdthx\") pod \"71099fbe-349d-4a04-857c-41f270ec89af\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.032681 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9mfm\" (UniqueName: \"kubernetes.io/projected/c84ddadb-263d-4a4a-bc3f-b645c449e392-kube-api-access-w9mfm\") pod \"c84ddadb-263d-4a4a-bc3f-b645c449e392\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.032943 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac42434c-8367-4cf2-9134-2d85444f90f4-logs\") pod \"ac42434c-8367-4cf2-9134-2d85444f90f4\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.032980 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-config-data\") pod \"71099fbe-349d-4a04-857c-41f270ec89af\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033080 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-kolla-config\") pod \"1489eff7-41ff-420a-bce0-14247f8554ee\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033117 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96c87\" (UniqueName: \"kubernetes.io/projected/ac42434c-8367-4cf2-9134-2d85444f90f4-kube-api-access-96c87\") pod \"ac42434c-8367-4cf2-9134-2d85444f90f4\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033142 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-config-data\") pod \"c84ddadb-263d-4a4a-bc3f-b645c449e392\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033160 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-combined-ca-bundle\") pod \"1489eff7-41ff-420a-bce0-14247f8554ee\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033175 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"1489eff7-41ff-420a-bce0-14247f8554ee\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033196 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-internal-tls-certs\") pod \"ac42434c-8367-4cf2-9134-2d85444f90f4\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033223 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-operator-scripts\") pod \"1489eff7-41ff-420a-bce0-14247f8554ee\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033262 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data-custom\") pod \"ac42434c-8367-4cf2-9134-2d85444f90f4\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033292 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-public-tls-certs\") pod \"ac42434c-8367-4cf2-9134-2d85444f90f4\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033317 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-secrets\") pod \"1489eff7-41ff-420a-bce0-14247f8554ee\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033337 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-generated\") pod \"1489eff7-41ff-420a-bce0-14247f8554ee\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033356 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c84ddadb-263d-4a4a-bc3f-b645c449e392-logs\") pod \"c84ddadb-263d-4a4a-bc3f-b645c449e392\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033374 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-galera-tls-certs\") pod \"1489eff7-41ff-420a-bce0-14247f8554ee\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033389 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-combined-ca-bundle\") pod \"71099fbe-349d-4a04-857c-41f270ec89af\" (UID: \"71099fbe-349d-4a04-857c-41f270ec89af\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033411 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-combined-ca-bundle\") pod \"c84ddadb-263d-4a4a-bc3f-b645c449e392\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033438 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9xkr\" (UniqueName: \"kubernetes.io/projected/dffbfc0a-c4ec-41cc-873b-552bc6b7fa69-kube-api-access-q9xkr\") pod \"dffbfc0a-c4ec-41cc-873b-552bc6b7fa69\" (UID: \"dffbfc0a-c4ec-41cc-873b-552bc6b7fa69\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033453 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9plq\" (UniqueName: \"kubernetes.io/projected/1489eff7-41ff-420a-bce0-14247f8554ee-kube-api-access-k9plq\") pod \"1489eff7-41ff-420a-bce0-14247f8554ee\" (UID: \"1489eff7-41ff-420a-bce0-14247f8554ee\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033487 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-public-tls-certs\") pod \"c84ddadb-263d-4a4a-bc3f-b645c449e392\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033504 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data\") pod \"ac42434c-8367-4cf2-9134-2d85444f90f4\" (UID: \"ac42434c-8367-4cf2-9134-2d85444f90f4\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033538 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-internal-tls-certs\") pod \"c84ddadb-263d-4a4a-bc3f-b645c449e392\" (UID: \"c84ddadb-263d-4a4a-bc3f-b645c449e392\") " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.033633 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "1489eff7-41ff-420a-bce0-14247f8554ee" (UID: "1489eff7-41ff-420a-bce0-14247f8554ee"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.036418 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ff5468974-c5722" event={"ID":"afc51d15-69dd-4900-886c-29a4f372df24","Type":"ContainerDied","Data":"273f3d77d426b220594a446856b22413f4c98ec6785cba3d65d9c9dcbc388906"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.036604 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ff5468974-c5722" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.036643 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.037235 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.037247 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqlm9\" (UniqueName: \"kubernetes.io/projected/542090f5-d2d8-4f78-b566-10e9885c341e-kube-api-access-fqlm9\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.037258 4757 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.037267 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.037277 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdhxl\" (UniqueName: \"kubernetes.io/projected/24ed1753-25de-4bde-8158-52cb7dd6a2f1-kube-api-access-gdhxl\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.037285 4757 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/542090f5-d2d8-4f78-b566-10e9885c341e-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.037294 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985540de-3212-41f4-a3a6-180ff5c4eda2-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.037302 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-default\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.037311 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5zw9\" (UniqueName: \"kubernetes.io/projected/985540de-3212-41f4-a3a6-180ff5c4eda2-kube-api-access-l5zw9\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: 
I1006 14:00:26.037320 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/985540de-3212-41f4-a3a6-180ff5c4eda2-logs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.040325 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac42434c-8367-4cf2-9134-2d85444f90f4-logs" (OuterVolumeSpecName: "logs") pod "ac42434c-8367-4cf2-9134-2d85444f90f4" (UID: "ac42434c-8367-4cf2-9134-2d85444f90f4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.040883 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac42434c-8367-4cf2-9134-2d85444f90f4-kube-api-access-96c87" (OuterVolumeSpecName: "kube-api-access-96c87") pod "ac42434c-8367-4cf2-9134-2d85444f90f4" (UID: "ac42434c-8367-4cf2-9134-2d85444f90f4"). InnerVolumeSpecName "kube-api-access-96c87". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.040970 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c84ddadb-263d-4a4a-bc3f-b645c449e392-kube-api-access-w9mfm" (OuterVolumeSpecName: "kube-api-access-w9mfm") pod "c84ddadb-263d-4a4a-bc3f-b645c449e392" (UID: "c84ddadb-263d-4a4a-bc3f-b645c449e392"). InnerVolumeSpecName "kube-api-access-w9mfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.041784 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "1489eff7-41ff-420a-bce0-14247f8554ee" (UID: "1489eff7-41ff-420a-bce0-14247f8554ee"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.041802 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71099fbe-349d-4a04-857c-41f270ec89af-kube-api-access-xdthx" (OuterVolumeSpecName: "kube-api-access-xdthx") pod "71099fbe-349d-4a04-857c-41f270ec89af" (UID: "71099fbe-349d-4a04-857c-41f270ec89af"). InnerVolumeSpecName "kube-api-access-xdthx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.042062 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1489eff7-41ff-420a-bce0-14247f8554ee" (UID: "1489eff7-41ff-420a-bce0-14247f8554ee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.045006 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ac42434c-8367-4cf2-9134-2d85444f90f4" (UID: "ac42434c-8367-4cf2-9134-2d85444f90f4"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.045077 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c84ddadb-263d-4a4a-bc3f-b645c449e392-logs" (OuterVolumeSpecName: "logs") pod "c84ddadb-263d-4a4a-bc3f-b645c449e392" (UID: "c84ddadb-263d-4a4a-bc3f-b645c449e392"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.043347 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.043229 4757 generic.go:334] "Generic (PLEG): container finished" podID="542090f5-d2d8-4f78-b566-10e9885c341e" containerID="bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66" exitCode=0 Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.043262 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerDied","Data":"bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.047495 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"542090f5-d2d8-4f78-b566-10e9885c341e","Type":"ContainerDied","Data":"49aea83301219707ca62b9e2d031d5d0032f9e74b8c6e1b344c23c65ee0d1fc3"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.047662 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "1489eff7-41ff-420a-bce0-14247f8554ee" (UID: "1489eff7-41ff-420a-bce0-14247f8554ee"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.047840 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dffbfc0a-c4ec-41cc-873b-552bc6b7fa69-kube-api-access-q9xkr" (OuterVolumeSpecName: "kube-api-access-q9xkr") pod "dffbfc0a-c4ec-41cc-873b-552bc6b7fa69" (UID: "dffbfc0a-c4ec-41cc-873b-552bc6b7fa69"). InnerVolumeSpecName "kube-api-access-q9xkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.056795 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1489eff7-41ff-420a-bce0-14247f8554ee-kube-api-access-k9plq" (OuterVolumeSpecName: "kube-api-access-k9plq") pod "1489eff7-41ff-420a-bce0-14247f8554ee" (UID: "1489eff7-41ff-420a-bce0-14247f8554ee"). InnerVolumeSpecName "kube-api-access-k9plq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.061016 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-secrets" (OuterVolumeSpecName: "secrets") pod "1489eff7-41ff-420a-bce0-14247f8554ee" (UID: "1489eff7-41ff-420a-bce0-14247f8554ee"). InnerVolumeSpecName "secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.067516 4757 generic.go:334] "Generic (PLEG): container finished" podID="71099fbe-349d-4a04-857c-41f270ec89af" containerID="ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4" exitCode=0 Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.067610 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"71099fbe-349d-4a04-857c-41f270ec89af","Type":"ContainerDied","Data":"ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.067641 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"71099fbe-349d-4a04-857c-41f270ec89af","Type":"ContainerDied","Data":"e9d33f287fea78d68b9a304f23b081e764bc6d18869fc2bedcbd94d8b4d6083a"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.067708 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.070682 4757 scope.go:117] "RemoveContainer" containerID="0cb8bd1a995fc5d7d87c203ebb692a3ce29bbd8e20024032b2050c0645baebc6" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.071344 4757 generic.go:334] "Generic (PLEG): container finished" podID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerID="19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886" exitCode=0 Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.071390 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c84ddadb-263d-4a4a-bc3f-b645c449e392","Type":"ContainerDied","Data":"19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.071427 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c84ddadb-263d-4a4a-bc3f-b645c449e392","Type":"ContainerDied","Data":"6bf5e94750ff6b0139f3e6e340904b34fbedc17adfa35a34ca143177362a95be"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.071473 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.074401 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" event={"ID":"985540de-3212-41f4-a3a6-180ff5c4eda2","Type":"ContainerDied","Data":"50035a0bd6893b397a6cb41f3bd4c88d6dca2b389500cf6b52e4bf691af6e8bd"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.074554 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6f4c564f58-t9kxj" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.080673 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78a4414b-7eec-457f-b08c-aeb719ffc320","Type":"ContainerDied","Data":"41030caccef7e2b1af92bd1ad914e58506845cf400fccf7b0184bf3f70f62852"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.080763 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.092297 4757 generic.go:334] "Generic (PLEG): container finished" podID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerID="9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296" exitCode=0 Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.092372 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d5595b-89h8z" event={"ID":"ac42434c-8367-4cf2-9134-2d85444f90f4","Type":"ContainerDied","Data":"9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.092398 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c78d5595b-89h8z" event={"ID":"ac42434c-8367-4cf2-9134-2d85444f90f4","Type":"ContainerDied","Data":"4270f7b5d7acb50511b04591cd3d9b54a3c2e3e74fb4dac7c77ba3938f308840"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.092456 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c78d5595b-89h8z" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.093417 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "mysql-db") pod "1489eff7-41ff-420a-bce0-14247f8554ee" (UID: "1489eff7-41ff-420a-bce0-14247f8554ee"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.098543 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron2bb5-account-delete-r8kww" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.098562 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron2bb5-account-delete-r8kww" event={"ID":"dffbfc0a-c4ec-41cc-873b-552bc6b7fa69","Type":"ContainerDied","Data":"37eaf9ca56a0cec49c024bca928dd0c3ae217f772bcb928eb42a803e3aee9250"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.098603 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37eaf9ca56a0cec49c024bca928dd0c3ae217f772bcb928eb42a803e3aee9250" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.099281 4757 scope.go:117] "RemoveContainer" containerID="501ec0020527b58bea34dc6aa1f5770171c8c96af98e8cc181a6f0a30690207d" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.104685 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapid1d1-account-delete-j7wzz" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.105360 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapid1d1-account-delete-j7wzz" event={"ID":"24ed1753-25de-4bde-8158-52cb7dd6a2f1","Type":"ContainerDied","Data":"7635fb16ef8f1e32964fecb1d97c5ab0e2ed1ae89d068c9b38c0ab1825343220"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.105409 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7635fb16ef8f1e32964fecb1d97c5ab0e2ed1ae89d068c9b38c0ab1825343220" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.134496 4757 generic.go:334] "Generic (PLEG): container finished" podID="1489eff7-41ff-420a-bce0-14247f8554ee" containerID="68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc" exitCode=0 Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.134569 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.134587 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1489eff7-41ff-420a-bce0-14247f8554ee","Type":"ContainerDied","Data":"68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.134613 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"1489eff7-41ff-420a-bce0-14247f8554ee","Type":"ContainerDied","Data":"eb11584b190b8ceeb18b079a27a3e4925b216563cf949028e500b4d81a7bb636"} Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.134577 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone2941-account-delete-kjhx5" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.134876 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novacell00cee-account-delete-2rcjp" podUID="7904f7d1-2332-4402-bd0b-4a40f5be43f9" containerName="mariadb-account-delete" containerID="cri-o://df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd" gracePeriod=30 Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.136418 4757 scope.go:117] "RemoveContainer" containerID="832893f98af9abff959fc064ee7dc85ca2245d5e8f5d1e911ffd6cf81dcf776b" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138661 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdthx\" (UniqueName: \"kubernetes.io/projected/71099fbe-349d-4a04-857c-41f270ec89af-kube-api-access-xdthx\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138681 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9mfm\" (UniqueName: \"kubernetes.io/projected/c84ddadb-263d-4a4a-bc3f-b645c449e392-kube-api-access-w9mfm\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138690 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac42434c-8367-4cf2-9134-2d85444f90f4-logs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138700 4757 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-kolla-config\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138711 4757 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96c87\" (UniqueName: \"kubernetes.io/projected/ac42434c-8367-4cf2-9134-2d85444f90f4-kube-api-access-96c87\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138753 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138762 4757 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1489eff7-41ff-420a-bce0-14247f8554ee-operator-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138770 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138778 4757 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-secrets\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138786 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1489eff7-41ff-420a-bce0-14247f8554ee-config-data-generated\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138810 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c84ddadb-263d-4a4a-bc3f-b645c449e392-logs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138821 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9xkr\" (UniqueName: \"kubernetes.io/projected/dffbfc0a-c4ec-41cc-873b-552bc6b7fa69-kube-api-access-q9xkr\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.138829 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9plq\" (UniqueName: \"kubernetes.io/projected/1489eff7-41ff-420a-bce0-14247f8554ee-kube-api-access-k9plq\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.156190 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "542090f5-d2d8-4f78-b566-10e9885c341e" (UID: "542090f5-d2d8-4f78-b566-10e9885c341e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.165872 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-ff5468974-c5722"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.166009 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "542090f5-d2d8-4f78-b566-10e9885c341e" (UID: "542090f5-d2d8-4f78-b566-10e9885c341e"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.169683 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.180275 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-ff5468974-c5722"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.187954 4757 scope.go:117] "RemoveContainer" containerID="85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.199712 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "71099fbe-349d-4a04-857c-41f270ec89af" (UID: "71099fbe-349d-4a04-857c-41f270ec89af"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.202283 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27a36069-f211-4f8e-9512-f9e08f252f5d" path="/var/lib/kubelet/pods/27a36069-f211-4f8e-9512-f9e08f252f5d/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.203225 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d124778-aecd-4366-9a84-ab8c2cb478b8" path="/var/lib/kubelet/pods/2d124778-aecd-4366-9a84-ab8c2cb478b8/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.204075 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39ef4372-3b20-44b5-b441-85f963e6a25a" path="/var/lib/kubelet/pods/39ef4372-3b20-44b5-b441-85f963e6a25a/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.205702 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a8780a9-61ea-43c8-a052-2e853328cb11" path="/var/lib/kubelet/pods/4a8780a9-61ea-43c8-a052-2e853328cb11/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.206601 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" path="/var/lib/kubelet/pods/562413a3-660e-4ed9-92d6-23cb7d84b936/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.207488 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61d68e86-89f3-4dc6-bb42-7286c789fbba" path="/var/lib/kubelet/pods/61d68e86-89f3-4dc6-bb42-7286c789fbba/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.209067 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a21e8c5a-5819-4e8c-9b20-5353625fc36b" path="/var/lib/kubelet/pods/a21e8c5a-5819-4e8c-9b20-5353625fc36b/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.210021 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afc51d15-69dd-4900-886c-29a4f372df24" path="/var/lib/kubelet/pods/afc51d15-69dd-4900-886c-29a4f372df24/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.210802 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5b34617-b163-4b7a-9950-53f64a8cca2c" path="/var/lib/kubelet/pods/c5b34617-b163-4b7a-9950-53f64a8cca2c/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.211448 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2b1e842-4aee-4ab7-97ad-6407e2d6834d" 
path="/var/lib/kubelet/pods/d2b1e842-4aee-4ab7-97ad-6407e2d6834d/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.212561 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3515117-5db7-4811-a21b-39aaaf03eda0" path="/var/lib/kubelet/pods/e3515117-5db7-4811-a21b-39aaaf03eda0/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.213219 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec6e1479-3b96-4c4b-be95-5834172d37ff" path="/var/lib/kubelet/pods/ec6e1479-3b96-4c4b-be95-5834172d37ff/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.213832 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbc7bc9f-cc73-4943-94fd-c7288e7efb52" path="/var/lib/kubelet/pods/fbc7bc9f-cc73-4943-94fd-c7288e7efb52/volumes" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.217891 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c84ddadb-263d-4a4a-bc3f-b645c449e392" (UID: "c84ddadb-263d-4a4a-bc3f-b645c449e392"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.222653 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1489eff7-41ff-420a-bce0-14247f8554ee" (UID: "1489eff7-41ff-420a-bce0-14247f8554ee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.226151 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.226206 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.226226 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-6f4c564f58-t9kxj"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.228462 4757 scope.go:117] "RemoveContainer" containerID="d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.228791 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-6f4c564f58-t9kxj"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.231298 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac42434c-8367-4cf2-9134-2d85444f90f4" (UID: "ac42434c-8367-4cf2-9134-2d85444f90f4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.240432 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.240467 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.240479 4757 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.240491 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.240503 4757 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.240514 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.240526 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.248181 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.249990 4757 scope.go:117] "RemoveContainer" containerID="bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.254042 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.256958 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "542090f5-d2d8-4f78-b566-10e9885c341e" (UID: "542090f5-d2d8-4f78-b566-10e9885c341e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.262174 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapid1d1-account-delete-j7wzz"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.266583 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapid1d1-account-delete-j7wzz"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.270634 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron2bb5-account-delete-r8kww"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.272770 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "1489eff7-41ff-420a-bce0-14247f8554ee" (UID: "1489eff7-41ff-420a-bce0-14247f8554ee"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.272808 4757 scope.go:117] "RemoveContainer" containerID="d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.272800 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-config-data" (OuterVolumeSpecName: "config-data") pod "c84ddadb-263d-4a4a-bc3f-b645c449e392" (UID: "c84ddadb-263d-4a4a-bc3f-b645c449e392"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.280127 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron2bb5-account-delete-r8kww"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.283222 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-config-data" (OuterVolumeSpecName: "config-data") pod "71099fbe-349d-4a04-857c-41f270ec89af" (UID: "71099fbe-349d-4a04-857c-41f270ec89af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.286279 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data" (OuterVolumeSpecName: "config-data") pod "ac42434c-8367-4cf2-9134-2d85444f90f4" (UID: "ac42434c-8367-4cf2-9134-2d85444f90f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.286847 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone2941-account-delete-kjhx5"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.291180 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ac42434c-8367-4cf2-9134-2d85444f90f4" (UID: "ac42434c-8367-4cf2-9134-2d85444f90f4"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.292501 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone2941-account-delete-kjhx5"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.293857 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ac42434c-8367-4cf2-9134-2d85444f90f4" (UID: "ac42434c-8367-4cf2-9134-2d85444f90f4"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.295191 4757 scope.go:117] "RemoveContainer" containerID="85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.296538 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080\": container with ID starting with 85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080 not found: ID does not exist" containerID="85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.296574 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080"} err="failed to get container status \"85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080\": rpc error: code = NotFound desc = could not find container \"85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080\": container with ID starting with 85aed47e17be02089454ae709bb98e02a653cd57a389079af7c1e794005db080 not found: ID does not exist" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.296608 4757 scope.go:117] "RemoveContainer" containerID="d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.296890 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9\": container with ID starting with d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9 not found: ID does not exist" containerID="d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.296949 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9"} err="failed to get container status \"d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9\": rpc error: code = NotFound desc = could not find container \"d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9\": container with ID starting with d10bb205702b34e04df8f3d4dd7f8051ac798a998c12a2c61384341390d931e9 not found: ID does not exist" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.296972 4757 scope.go:117] "RemoveContainer" containerID="bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.297305 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66\": container with ID starting with bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66 not found: ID does not exist" containerID="bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.297340 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66"} err="failed to get container status \"bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66\": rpc error: code = NotFound desc = could not find container \"bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66\": container with ID starting with bb3ce5af963b7888d8a2f2bac501984a31d2cf21692061032094f06beedecf66 not found: ID does not exist" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.297391 4757 scope.go:117] "RemoveContainer" containerID="d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.297677 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb\": container with ID starting with d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb not found: ID does not exist" containerID="d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.297703 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb"} err="failed to get container status \"d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb\": rpc error: code = NotFound desc = could not find container \"d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb\": container with ID starting with d0a1424bce5344057fa545a1be5e25b8f2ea9bb5d886078a67599c0c2c3b96eb not found: ID does not exist" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.297718 4757 scope.go:117] "RemoveContainer" containerID="ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.300293 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c84ddadb-263d-4a4a-bc3f-b645c449e392" (UID: "c84ddadb-263d-4a4a-bc3f-b645c449e392"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.300354 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c84ddadb-263d-4a4a-bc3f-b645c449e392" (UID: "c84ddadb-263d-4a4a-bc3f-b645c449e392"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.308387 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-config-data" (OuterVolumeSpecName: "config-data") pod "542090f5-d2d8-4f78-b566-10e9885c341e" (UID: "542090f5-d2d8-4f78-b566-10e9885c341e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342591 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342629 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpk6k\" (UniqueName: \"kubernetes.io/projected/df034a5b-6848-45e4-9e1b-8613a9ab590f-kube-api-access-fpk6k\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342640 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342649 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/71099fbe-349d-4a04-857c-41f270ec89af-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342656 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/542090f5-d2d8-4f78-b566-10e9885c341e-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342665 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342673 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342681 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342689 4757 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1489eff7-41ff-420a-bce0-14247f8554ee-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342697 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c84ddadb-263d-4a4a-bc3f-b645c449e392-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.342708 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac42434c-8367-4cf2-9134-2d85444f90f4-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.346352 4757 scope.go:117] "RemoveContainer" containerID="ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.353398 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4\": container with ID starting with ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4 not found: ID does not 
exist" containerID="ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.353421 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4"} err="failed to get container status \"ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4\": rpc error: code = NotFound desc = could not find container \"ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4\": container with ID starting with ab2102972a92e9dd4b82cbd0edb8ec4655819cab11c8d022f7994573563037f4 not found: ID does not exist" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.353442 4757 scope.go:117] "RemoveContainer" containerID="19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.467332 4757 scope.go:117] "RemoveContainer" containerID="7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.501804 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.512190 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.515601 4757 scope.go:117] "RemoveContainer" containerID="19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.516136 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886\": container with ID starting with 19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886 not found: ID does not exist" containerID="19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.516188 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886"} err="failed to get container status \"19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886\": rpc error: code = NotFound desc = could not find container \"19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886\": container with ID starting with 19bef65829342cf5b8843924bb989c79fcfe0aebe827b2d20e1c06064c84e886 not found: ID does not exist" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.517228 4757 scope.go:117] "RemoveContainer" containerID="7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.518044 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab\": container with ID starting with 7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab not found: ID does not exist" containerID="7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.518076 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab"} err="failed to get container status 
\"7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab\": rpc error: code = NotFound desc = could not find container \"7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab\": container with ID starting with 7472b490592c5f3cc0283af1e322f1bf86e9549fc11e557328a21eca6cdc47ab not found: ID does not exist" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.518136 4757 scope.go:117] "RemoveContainer" containerID="103f1cb6b966566428982058c2497d82d3412e2a56a570fe97a8556bbaf90b4e" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.520966 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.531554 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.537975 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c78d5595b-89h8z"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.546345 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5c78d5595b-89h8z"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.551786 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.555581 4757 scope.go:117] "RemoveContainer" containerID="8657327aee1067ffca05e4e9c577e2dddc6b63823ba1e0a17d6ec6a140fc3cc0" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.560753 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.570242 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.574325 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.582050 4757 scope.go:117] "RemoveContainer" containerID="627201afc996e1b5ebc9ec2b49081817f4957580a028ef0ede8ac13918294d2e" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.591913 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.592397 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.592767 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 06 14:00:26 crc 
kubenswrapper[4757]: E1006 14:00:26.592798 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.595798 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.601599 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.603888 4757 scope.go:117] "RemoveContainer" containerID="17ce35871062aaa7a1aa4139c7f03addfd9e5a2514de88d4e098daa173fc41a8" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.604170 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.604213 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.630753 4757 scope.go:117] "RemoveContainer" containerID="9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.649733 4757 scope.go:117] "RemoveContainer" containerID="92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.664675 4757 scope.go:117] "RemoveContainer" containerID="9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296" Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.665214 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296\": container with ID starting with 9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296 not found: ID does not exist" containerID="9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296" Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.665252 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296"} err="failed to get container status \"9fb3558d9cee93fdd2b8e54924363c8bbc9ddb4127d50abe466cbb11eaf6f296\": rpc error: code = NotFound 
Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.665278 4757 scope.go:117] "RemoveContainer" containerID="92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2"
Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.665480 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2\": container with ID starting with 92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2 not found: ID does not exist" containerID="92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2"
Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.665496 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2"} err="failed to get container status \"92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2\": rpc error: code = NotFound desc = could not find container \"92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2\": container with ID starting with 92484d139159526d958a23b09b865ab4b1219b4eb5727898baa360c9ff0a42c2 not found: ID does not exist"
Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.665509 4757 scope.go:117] "RemoveContainer" containerID="68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc"
Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.681651 4757 scope.go:117] "RemoveContainer" containerID="9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7"
Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.708575 4757 scope.go:117] "RemoveContainer" containerID="68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc"
Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.708981 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc\": container with ID starting with 68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc not found: ID does not exist" containerID="68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc"
Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.709028 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc"} err="failed to get container status \"68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc\": rpc error: code = NotFound desc = could not find container \"68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc\": container with ID starting with 68027503c2ce696a23bbe79112eb3c8e940f784eaa625ef9ab155311edc1e2dc not found: ID does not exist"
Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.709059 4757 scope.go:117] "RemoveContainer" containerID="9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7"
Oct 06 14:00:26 crc kubenswrapper[4757]: E1006 14:00:26.709919 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7\": container with ID starting with 9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7 not found: ID does not exist" containerID="9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7"
Oct 06 14:00:26 crc kubenswrapper[4757]: I1006 14:00:26.709995 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7"} err="failed to get container status \"9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7\": rpc error: code = NotFound desc = could not find container \"9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7\": container with ID starting with 9567ee2ffcb88c85096696bc683385a2aadc2e9ff047359f743b2e0a41cd15c7 not found: ID does not exist"
Oct 06 14:00:27 crc kubenswrapper[4757]: E1006 14:00:27.073928 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Oct 06 14:00:27 crc kubenswrapper[4757]: E1006 14:00:27.074309 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data podName:0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:35.074293875 +0000 UTC m=+1323.571612412 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data") pod "rabbitmq-server-0" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7") : configmap "rabbitmq-config-data" not found
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.105476 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-678769d845-d782m"
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.155256 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_cc01b313-87cb-44f6-9c85-84ae4931e1f6/ovn-northd/0.log"
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.155315 4757 generic.go:334] "Generic (PLEG): container finished" podID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerID="1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b" exitCode=139
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.155394 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"cc01b313-87cb-44f6-9c85-84ae4931e1f6","Type":"ContainerDied","Data":"1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b"}
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.174538 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfbzb\" (UniqueName: \"kubernetes.io/projected/80bdd62a-4024-4734-9ca0-a97f2bae29c3-kube-api-access-rfbzb\") pod \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") "
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.174573 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-config-data\") pod \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") "
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.174589 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-scripts\") pod \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") "
\"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.174629 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-internal-tls-certs\") pod \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.174692 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-fernet-keys\") pod \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.174728 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-public-tls-certs\") pod \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.175396 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-credential-keys\") pod \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.175431 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-combined-ca-bundle\") pod \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\" (UID: \"80bdd62a-4024-4734-9ca0-a97f2bae29c3\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.183248 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-scripts" (OuterVolumeSpecName: "scripts") pod "80bdd62a-4024-4734-9ca0-a97f2bae29c3" (UID: "80bdd62a-4024-4734-9ca0-a97f2bae29c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.187172 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "80bdd62a-4024-4734-9ca0-a97f2bae29c3" (UID: "80bdd62a-4024-4734-9ca0-a97f2bae29c3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.187166 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80bdd62a-4024-4734-9ca0-a97f2bae29c3-kube-api-access-rfbzb" (OuterVolumeSpecName: "kube-api-access-rfbzb") pod "80bdd62a-4024-4734-9ca0-a97f2bae29c3" (UID: "80bdd62a-4024-4734-9ca0-a97f2bae29c3"). InnerVolumeSpecName "kube-api-access-rfbzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.189251 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "80bdd62a-4024-4734-9ca0-a97f2bae29c3" (UID: "80bdd62a-4024-4734-9ca0-a97f2bae29c3"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.197721 4757 generic.go:334] "Generic (PLEG): container finished" podID="80bdd62a-4024-4734-9ca0-a97f2bae29c3" containerID="31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab" exitCode=0 Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.197766 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-678769d845-d782m" event={"ID":"80bdd62a-4024-4734-9ca0-a97f2bae29c3","Type":"ContainerDied","Data":"31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab"} Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.197825 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-678769d845-d782m" event={"ID":"80bdd62a-4024-4734-9ca0-a97f2bae29c3","Type":"ContainerDied","Data":"09aeacef459637a0281dce4676bdc54627b7826e6ecae435591441cd59a1f043"} Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.197844 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-678769d845-d782m" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.197847 4757 scope.go:117] "RemoveContainer" containerID="31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.206694 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-config-data" (OuterVolumeSpecName: "config-data") pod "80bdd62a-4024-4734-9ca0-a97f2bae29c3" (UID: "80bdd62a-4024-4734-9ca0-a97f2bae29c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.207314 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80bdd62a-4024-4734-9ca0-a97f2bae29c3" (UID: "80bdd62a-4024-4734-9ca0-a97f2bae29c3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.221732 4757 scope.go:117] "RemoveContainer" containerID="31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab" Oct 06 14:00:27 crc kubenswrapper[4757]: E1006 14:00:27.222161 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab\": container with ID starting with 31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab not found: ID does not exist" containerID="31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.222211 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab"} err="failed to get container status \"31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab\": rpc error: code = NotFound desc = could not find container \"31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab\": container with ID starting with 31fe86cb318306616d39850b2094828652e5181bae98c6563c8e5d3d8c3552ab not found: ID does not exist" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.224969 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "80bdd62a-4024-4734-9ca0-a97f2bae29c3" (UID: "80bdd62a-4024-4734-9ca0-a97f2bae29c3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.228022 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "80bdd62a-4024-4734-9ca0-a97f2bae29c3" (UID: "80bdd62a-4024-4734-9ca0-a97f2bae29c3"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.279605 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfbzb\" (UniqueName: \"kubernetes.io/projected/80bdd62a-4024-4734-9ca0-a97f2bae29c3-kube-api-access-rfbzb\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.279802 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.279822 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.279840 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.279894 4757 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.279911 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.279927 4757 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.279982 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80bdd62a-4024-4734-9ca0-a97f2bae29c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:27 crc kubenswrapper[4757]: E1006 14:00:27.322319 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 06 14:00:27 crc kubenswrapper[4757]: E1006 14:00:27.324312 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 06 14:00:27 crc kubenswrapper[4757]: E1006 14:00:27.336174 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 06 14:00:27 crc kubenswrapper[4757]: E1006 14:00:27.336264 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container 
is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="19ac04ce-d95a-49ab-8eb2-eaf505990a53" containerName="nova-scheduler-scheduler" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.532946 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-678769d845-d782m"] Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.537652 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-678769d845-d782m"] Oct 06 14:00:27 crc kubenswrapper[4757]: E1006 14:00:27.584614 4757 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Oct 06 14:00:27 crc kubenswrapper[4757]: E1006 14:00:27.584690 4757 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data podName:cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61 nodeName:}" failed. No retries permitted until 2025-10-06 14:00:35.584673447 +0000 UTC m=+1324.081991984 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data") pod "rabbitmq-cell1-server-0" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61") : configmap "rabbitmq-cell1-config-data" not found Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.602470 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_cc01b313-87cb-44f6-9c85-84ae4931e1f6/ovn-northd/0.log" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.602530 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.696664 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-config\") pod \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.696735 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-rundir\") pod \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.696790 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-metrics-certs-tls-certs\") pod \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.696819 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-combined-ca-bundle\") pod \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.696903 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-northd-tls-certs\") pod \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") " Oct 06 14:00:27 crc kubenswrapper[4757]: 
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.696931 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-scripts\") pod \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") "
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.696969 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f845v\" (UniqueName: \"kubernetes.io/projected/cc01b313-87cb-44f6-9c85-84ae4931e1f6-kube-api-access-f845v\") pod \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\" (UID: \"cc01b313-87cb-44f6-9c85-84ae4931e1f6\") "
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.697185 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "cc01b313-87cb-44f6-9c85-84ae4931e1f6" (UID: "cc01b313-87cb-44f6-9c85-84ae4931e1f6"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.697442 4757 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-rundir\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.697809 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-scripts" (OuterVolumeSpecName: "scripts") pod "cc01b313-87cb-44f6-9c85-84ae4931e1f6" (UID: "cc01b313-87cb-44f6-9c85-84ae4931e1f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.697805 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-config" (OuterVolumeSpecName: "config") pod "cc01b313-87cb-44f6-9c85-84ae4931e1f6" (UID: "cc01b313-87cb-44f6-9c85-84ae4931e1f6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.701442 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc01b313-87cb-44f6-9c85-84ae4931e1f6-kube-api-access-f845v" (OuterVolumeSpecName: "kube-api-access-f845v") pod "cc01b313-87cb-44f6-9c85-84ae4931e1f6" (UID: "cc01b313-87cb-44f6-9c85-84ae4931e1f6"). InnerVolumeSpecName "kube-api-access-f845v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.736360 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc01b313-87cb-44f6-9c85-84ae4931e1f6" (UID: "cc01b313-87cb-44f6-9c85-84ae4931e1f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.764389 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "cc01b313-87cb-44f6-9c85-84ae4931e1f6" (UID: "cc01b313-87cb-44f6-9c85-84ae4931e1f6"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.766328 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "cc01b313-87cb-44f6-9c85-84ae4931e1f6" (UID: "cc01b313-87cb-44f6-9c85-84ae4931e1f6"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.799087 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-config\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.799130 4757 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.799141 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.799152 4757 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc01b313-87cb-44f6-9c85-84ae4931e1f6-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.799161 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc01b313-87cb-44f6-9c85-84ae4931e1f6-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:27 crc kubenswrapper[4757]: I1006 14:00:27.799169 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f845v\" (UniqueName: \"kubernetes.io/projected/cc01b313-87cb-44f6-9c85-84ae4931e1f6-kube-api-access-f845v\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.006137 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110586 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-plugins-conf\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110651 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-erlang-cookie-secret\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110680 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110737 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-erlang-cookie\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110772 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fk52f\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-kube-api-access-fk52f\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110803 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-pod-info\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110833 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-tls\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110906 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-confd\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110960 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-plugins\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.110991 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-server-conf\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.111020 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data\") pod \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\" (UID: \"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7\") "
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.112218 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.114628 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.115283 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.116541 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.117666 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-pod-info" (OuterVolumeSpecName: "pod-info") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.118822 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-kube-api-access-fk52f" (OuterVolumeSpecName: "kube-api-access-fk52f") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "kube-api-access-fk52f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.118923 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.122058 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.139792 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data" (OuterVolumeSpecName: "config-data") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.161663 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-server-conf" (OuterVolumeSpecName: "server-conf") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.188624 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.190780 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1489eff7-41ff-420a-bce0-14247f8554ee" path="/var/lib/kubelet/pods/1489eff7-41ff-420a-bce0-14247f8554ee/volumes"
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.191617 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24ed1753-25de-4bde-8158-52cb7dd6a2f1" path="/var/lib/kubelet/pods/24ed1753-25de-4bde-8158-52cb7dd6a2f1/volumes"
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.192546 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4da5a9db-df84-4b71-b566-7c723fd7eb65" path="/var/lib/kubelet/pods/4da5a9db-df84-4b71-b566-7c723fd7eb65/volumes"
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.194400 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" path="/var/lib/kubelet/pods/542090f5-d2d8-4f78-b566-10e9885c341e/volumes"
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.195783 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71099fbe-349d-4a04-857c-41f270ec89af" path="/var/lib/kubelet/pods/71099fbe-349d-4a04-857c-41f270ec89af/volumes"
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.197366 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78a4414b-7eec-457f-b08c-aeb719ffc320" path="/var/lib/kubelet/pods/78a4414b-7eec-457f-b08c-aeb719ffc320/volumes"
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.198200 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80bdd62a-4024-4734-9ca0-a97f2bae29c3" path="/var/lib/kubelet/pods/80bdd62a-4024-4734-9ca0-a97f2bae29c3/volumes"
Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.198858 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="985540de-3212-41f4-a3a6-180ff5c4eda2" path="/var/lib/kubelet/pods/985540de-3212-41f4-a3a6-180ff5c4eda2/volumes"
14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.200215 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" path="/var/lib/kubelet/pods/ac42434c-8367-4cf2-9134-2d85444f90f4/volumes" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.201380 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" path="/var/lib/kubelet/pods/c84ddadb-263d-4a4a-bc3f-b645c449e392/volumes" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.202377 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df034a5b-6848-45e4-9e1b-8613a9ab590f" path="/var/lib/kubelet/pods/df034a5b-6848-45e4-9e1b-8613a9ab590f/volumes" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.202810 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dffbfc0a-c4ec-41cc-873b-552bc6b7fa69" path="/var/lib/kubelet/pods/dffbfc0a-c4ec-41cc-873b-552bc6b7fa69/volumes" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212639 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212680 4757 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-server-conf\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212691 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212702 4757 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212713 4757 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212746 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212761 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212774 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fk52f\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-kube-api-access-fk52f\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212785 4757 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-pod-info\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.212795 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" 
(UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.222796 4757 generic.go:334] "Generic (PLEG): container finished" podID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" containerID="a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b" exitCode=0 Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.222880 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61","Type":"ContainerDied","Data":"a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b"} Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.222901 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61","Type":"ContainerDied","Data":"75250fb51d6344b2c68698bb3fb1999e519b95bf1c6e8dbeed48644d8ef8b752"} Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.222917 4757 scope.go:117] "RemoveContainer" containerID="a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.223004 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.228475 4757 generic.go:334] "Generic (PLEG): container finished" podID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" containerID="1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7" exitCode=0 Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.228564 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.228857 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7","Type":"ContainerDied","Data":"1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7"} Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.229035 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7","Type":"ContainerDied","Data":"ac927ab9b18957952cba5b66bcedd6426a66d7dcae41ab75d009ce81b7468fe5"} Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.231566 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_cc01b313-87cb-44f6-9c85-84ae4931e1f6/ovn-northd/0.log" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.231652 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.231616 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"cc01b313-87cb-44f6-9c85-84ae4931e1f6","Type":"ContainerDied","Data":"d7338582b225420a21917e687be20d0d1a41b74e3eada643c81f2538c1c6531c"} Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.236796 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" (UID: "0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.240035 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.258358 4757 scope.go:117] "RemoveContainer" containerID="42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.262836 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.271103 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.279030 4757 scope.go:117] "RemoveContainer" containerID="a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b" Oct 06 14:00:28 crc kubenswrapper[4757]: E1006 14:00:28.279512 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b\": container with ID starting with a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b not found: ID does not exist" containerID="a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.279555 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b"} err="failed to get container status \"a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b\": rpc error: code = NotFound desc = could not find container \"a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b\": container with ID starting with a1511484b8d6f60a8bb8cc2011aa0f1a81bd98179bc7f090f9aec83ec900815b not found: ID does not exist" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.279581 4757 scope.go:117] "RemoveContainer" containerID="42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4" Oct 06 14:00:28 crc kubenswrapper[4757]: E1006 14:00:28.279865 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4\": container with ID starting with 42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4 not found: ID does not exist" containerID="42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.279892 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4"} err="failed to get container status \"42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4\": rpc error: code = NotFound desc = could not find container \"42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4\": container with ID starting with 42364390ce15ba7722dd995ec200f051b040d7e11226cb59a9ad77032b3171c4 not found: ID does not exist" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.279907 4757 scope.go:117] "RemoveContainer" containerID="1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.318306 4757 scope.go:117] "RemoveContainer" 
containerID="60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336172 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-erlang-cookie-secret\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336333 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-pod-info\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336382 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-confd\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336443 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-plugins-conf\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336476 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-plugins\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336496 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ttqm\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-kube-api-access-7ttqm\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336541 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-tls\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336597 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336635 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-erlang-cookie\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336650 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-server-conf\") pod 
\"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.336684 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\" (UID: \"cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61\") " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.337280 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.337299 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.340848 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.342617 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.342655 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.343513 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.344311 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-kube-api-access-7ttqm" (OuterVolumeSpecName: "kube-api-access-7ttqm") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "kube-api-access-7ttqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.344665 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-pod-info" (OuterVolumeSpecName: "pod-info") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.344801 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.347688 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.364360 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data" (OuterVolumeSpecName: "config-data") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: E1006 14:00:28.375791 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 06 14:00:28 crc kubenswrapper[4757]: E1006 14:00:28.377146 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 06 14:00:28 crc kubenswrapper[4757]: E1006 14:00:28.378449 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Oct 06 14:00:28 crc kubenswrapper[4757]: E1006 14:00:28.378507 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="924963ee-1194-4d98-84d7-9bb3e426f7bc" containerName="nova-cell0-conductor-conductor" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.385780 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-server-conf" (OuterVolumeSpecName: "server-conf") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.433383 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" (UID: "cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438180 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438214 4757 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-server-conf\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438252 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438262 4757 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438270 4757 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-pod-info\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438278 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438286 4757 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438294 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438303 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ttqm\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-kube-api-access-7ttqm\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438313 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.438321 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.453364 4757 operation_generator.go:917] UnmountDevice 
succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.490641 4757 scope.go:117] "RemoveContainer" containerID="1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7" Oct 06 14:00:28 crc kubenswrapper[4757]: E1006 14:00:28.491112 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7\": container with ID starting with 1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7 not found: ID does not exist" containerID="1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.491157 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7"} err="failed to get container status \"1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7\": rpc error: code = NotFound desc = could not find container \"1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7\": container with ID starting with 1512b2729e3ce1fbedadd844006578943dc275809b766a9088f97452d877e7e7 not found: ID does not exist" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.491186 4757 scope.go:117] "RemoveContainer" containerID="60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379" Oct 06 14:00:28 crc kubenswrapper[4757]: E1006 14:00:28.491464 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379\": container with ID starting with 60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379 not found: ID does not exist" containerID="60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.491517 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379"} err="failed to get container status \"60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379\": rpc error: code = NotFound desc = could not find container \"60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379\": container with ID starting with 60db55d4ac0f9a0a97f39e30e07ad0326dde67452cb7ced75d9e2b87024ac379 not found: ID does not exist" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.491548 4757 scope.go:117] "RemoveContainer" containerID="b1eadb61598f06991c54612b8621cac74bf4c117422efafbae7b8a42d1721473" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.514431 4757 scope.go:117] "RemoveContainer" containerID="1c6cdcbf807850c39d917f526c3686bad9485f567af58c814535019523f2074b" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.547726 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.585356 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.593923 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 14:00:28 crc kubenswrapper[4757]: 
I1006 14:00:28.600636 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 14:00:28 crc kubenswrapper[4757]: I1006 14:00:28.629546 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.116258 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.244894 4757 generic.go:334] "Generic (PLEG): container finished" podID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerID="7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb" exitCode=0 Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.244952 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-996ccd7c9-wv7n4" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.244964 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-996ccd7c9-wv7n4" event={"ID":"3e6333c1-01c0-42fd-a75f-31a2c57e9db2","Type":"ContainerDied","Data":"7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb"} Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.245215 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-996ccd7c9-wv7n4" event={"ID":"3e6333c1-01c0-42fd-a75f-31a2c57e9db2","Type":"ContainerDied","Data":"c351b5e79eb368979f987d102b3982934f0a96d05220650140c53c997e64bc49"} Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.245235 4757 scope.go:117] "RemoveContainer" containerID="7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.248548 4757 generic.go:334] "Generic (PLEG): container finished" podID="924963ee-1194-4d98-84d7-9bb3e426f7bc" containerID="9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b" exitCode=0 Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.248605 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"924963ee-1194-4d98-84d7-9bb3e426f7bc","Type":"ContainerDied","Data":"9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b"} Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.253396 4757 generic.go:334] "Generic (PLEG): container finished" podID="19ac04ce-d95a-49ab-8eb2-eaf505990a53" containerID="656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36" exitCode=0 Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.253441 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"19ac04ce-d95a-49ab-8eb2-eaf505990a53","Type":"ContainerDied","Data":"656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36"} Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.263562 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-logs\") pod \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.263618 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-combined-ca-bundle\") pod \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " Oct 06 14:00:29 crc 
kubenswrapper[4757]: I1006 14:00:29.263725 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data-custom\") pod \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.263776 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data\") pod \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.263826 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bn598\" (UniqueName: \"kubernetes.io/projected/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-kube-api-access-bn598\") pod \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\" (UID: \"3e6333c1-01c0-42fd-a75f-31a2c57e9db2\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.264019 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-logs" (OuterVolumeSpecName: "logs") pod "3e6333c1-01c0-42fd-a75f-31a2c57e9db2" (UID: "3e6333c1-01c0-42fd-a75f-31a2c57e9db2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.264250 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-logs\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.267023 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-kube-api-access-bn598" (OuterVolumeSpecName: "kube-api-access-bn598") pod "3e6333c1-01c0-42fd-a75f-31a2c57e9db2" (UID: "3e6333c1-01c0-42fd-a75f-31a2c57e9db2"). InnerVolumeSpecName "kube-api-access-bn598". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.268964 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3e6333c1-01c0-42fd-a75f-31a2c57e9db2" (UID: "3e6333c1-01c0-42fd-a75f-31a2c57e9db2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.317308 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e6333c1-01c0-42fd-a75f-31a2c57e9db2" (UID: "3e6333c1-01c0-42fd-a75f-31a2c57e9db2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.324869 4757 scope.go:117] "RemoveContainer" containerID="b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.351961 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data" (OuterVolumeSpecName: "config-data") pod "3e6333c1-01c0-42fd-a75f-31a2c57e9db2" (UID: "3e6333c1-01c0-42fd-a75f-31a2c57e9db2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.365625 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.365652 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.365662 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.365672 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bn598\" (UniqueName: \"kubernetes.io/projected/3e6333c1-01c0-42fd-a75f-31a2c57e9db2-kube-api-access-bn598\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.393238 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.398479 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.402865 4757 scope.go:117] "RemoveContainer" containerID="7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb" Oct 06 14:00:29 crc kubenswrapper[4757]: E1006 14:00:29.403133 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb\": container with ID starting with 7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb not found: ID does not exist" containerID="7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.403159 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb"} err="failed to get container status \"7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb\": rpc error: code = NotFound desc = could not find container \"7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb\": container with ID starting with 7ca2f658b6761b42b567741f568c193b4f94743723c1e6c585be5cb9f29980bb not found: ID does not exist" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.403176 4757 scope.go:117] "RemoveContainer" containerID="b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988" Oct 06 14:00:29 crc kubenswrapper[4757]: E1006 14:00:29.403383 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988\": container with ID starting with b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988 not found: ID does not exist" containerID="b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.403401 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988"} err="failed to get container status \"b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988\": rpc error: code = NotFound desc = could not find container \"b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988\": container with ID starting with b87d6d10ad564103f56d850792c4af8bcf31280f5d0ac840a8140a30f0613988 not found: ID does not exist" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.466978 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzx5q\" (UniqueName: \"kubernetes.io/projected/19ac04ce-d95a-49ab-8eb2-eaf505990a53-kube-api-access-kzx5q\") pod \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.469650 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19ac04ce-d95a-49ab-8eb2-eaf505990a53-kube-api-access-kzx5q" (OuterVolumeSpecName: "kube-api-access-kzx5q") pod "19ac04ce-d95a-49ab-8eb2-eaf505990a53" (UID: "19ac04ce-d95a-49ab-8eb2-eaf505990a53"). InnerVolumeSpecName "kube-api-access-kzx5q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.568071 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-config-data\") pod \"924963ee-1194-4d98-84d7-9bb3e426f7bc\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.568175 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpssb\" (UniqueName: \"kubernetes.io/projected/924963ee-1194-4d98-84d7-9bb3e426f7bc-kube-api-access-cpssb\") pod \"924963ee-1194-4d98-84d7-9bb3e426f7bc\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.568248 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-combined-ca-bundle\") pod \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.568302 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-combined-ca-bundle\") pod \"924963ee-1194-4d98-84d7-9bb3e426f7bc\" (UID: \"924963ee-1194-4d98-84d7-9bb3e426f7bc\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.568328 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-config-data\") pod \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\" (UID: \"19ac04ce-d95a-49ab-8eb2-eaf505990a53\") " Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.568715 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzx5q\" (UniqueName: \"kubernetes.io/projected/19ac04ce-d95a-49ab-8eb2-eaf505990a53-kube-api-access-kzx5q\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.571179 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/924963ee-1194-4d98-84d7-9bb3e426f7bc-kube-api-access-cpssb" (OuterVolumeSpecName: "kube-api-access-cpssb") pod "924963ee-1194-4d98-84d7-9bb3e426f7bc" (UID: "924963ee-1194-4d98-84d7-9bb3e426f7bc"). InnerVolumeSpecName "kube-api-access-cpssb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.586952 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-996ccd7c9-wv7n4"] Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.591347 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-996ccd7c9-wv7n4"] Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.601309 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "924963ee-1194-4d98-84d7-9bb3e426f7bc" (UID: "924963ee-1194-4d98-84d7-9bb3e426f7bc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.602645 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-config-data" (OuterVolumeSpecName: "config-data") pod "924963ee-1194-4d98-84d7-9bb3e426f7bc" (UID: "924963ee-1194-4d98-84d7-9bb3e426f7bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.604048 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19ac04ce-d95a-49ab-8eb2-eaf505990a53" (UID: "19ac04ce-d95a-49ab-8eb2-eaf505990a53"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.613028 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-config-data" (OuterVolumeSpecName: "config-data") pod "19ac04ce-d95a-49ab-8eb2-eaf505990a53" (UID: "19ac04ce-d95a-49ab-8eb2-eaf505990a53"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.670740 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.670785 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.670799 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19ac04ce-d95a-49ab-8eb2-eaf505990a53-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.670809 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924963ee-1194-4d98-84d7-9bb3e426f7bc-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:29 crc kubenswrapper[4757]: I1006 14:00:29.670824 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpssb\" (UniqueName: \"kubernetes.io/projected/924963ee-1194-4d98-84d7-9bb3e426f7bc-kube-api-access-cpssb\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.187928 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" path="/var/lib/kubelet/pods/0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7/volumes" Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.188535 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" path="/var/lib/kubelet/pods/3e6333c1-01c0-42fd-a75f-31a2c57e9db2/volumes" Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.189548 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" path="/var/lib/kubelet/pods/cc01b313-87cb-44f6-9c85-84ae4931e1f6/volumes" Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.190306 4757 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" path="/var/lib/kubelet/pods/cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61/volumes" Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.262962 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"924963ee-1194-4d98-84d7-9bb3e426f7bc","Type":"ContainerDied","Data":"82b0009e68461935aa036599f7fa8aebc992b0d116fd84f806bb8dcdbd1ced96"} Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.263026 4757 scope.go:117] "RemoveContainer" containerID="9ce79a47b7eef2fc901e662389fe9df01b87649a04e94bc2a16eff829fe1027b" Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.263264 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.265495 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"19ac04ce-d95a-49ab-8eb2-eaf505990a53","Type":"ContainerDied","Data":"91edb9e968f70f253f6fa3bbcd656777ae3f7929bbfc0d79e3fea81b74e4c539"} Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.265537 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.287278 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.288516 4757 scope.go:117] "RemoveContainer" containerID="656d1308014c9035d686484ef2821c17e6d01aebe44ce39b183a689e42ab6a36" Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.292575 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.310556 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 14:00:30 crc kubenswrapper[4757]: I1006 14:00:30.321559 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 06 14:00:31 crc kubenswrapper[4757]: E1006 14:00:31.591523 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 06 14:00:31 crc kubenswrapper[4757]: E1006 14:00:31.592552 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 06 14:00:31 crc kubenswrapper[4757]: E1006 14:00:31.592866 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Oct 06 14:00:31 crc 
kubenswrapper[4757]: E1006 14:00:31.593105 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server"
Oct 06 14:00:31 crc kubenswrapper[4757]: E1006 14:00:31.593823 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:31 crc kubenswrapper[4757]: E1006 14:00:31.595164 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:31 crc kubenswrapper[4757]: E1006 14:00:31.596555 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:31 crc kubenswrapper[4757]: E1006 14:00:31.596668 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd"
Oct 06 14:00:32 crc kubenswrapper[4757]: I1006 14:00:32.196821 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19ac04ce-d95a-49ab-8eb2-eaf505990a53" path="/var/lib/kubelet/pods/19ac04ce-d95a-49ab-8eb2-eaf505990a53/volumes"
Oct 06 14:00:32 crc kubenswrapper[4757]: I1006 14:00:32.199090 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="924963ee-1194-4d98-84d7-9bb3e426f7bc" path="/var/lib/kubelet/pods/924963ee-1194-4d98-84d7-9bb3e426f7bc/volumes"
Oct 06 14:00:34 crc kubenswrapper[4757]: I1006 14:00:34.361019 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 06 14:00:34 crc kubenswrapper[4757]: I1006 14:00:34.361065 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 06 14:00:36 crc kubenswrapper[4757]: E1006 14:00:36.593233 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:36 crc kubenswrapper[4757]: E1006 14:00:36.594112 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:36 crc kubenswrapper[4757]: E1006 14:00:36.594319 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:36 crc kubenswrapper[4757]: E1006 14:00:36.594917 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:36 crc kubenswrapper[4757]: E1006 14:00:36.594982 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server"
Oct 06 14:00:36 crc kubenswrapper[4757]: E1006 14:00:36.595780 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:36 crc kubenswrapper[4757]: E1006 14:00:36.597133 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:36 crc kubenswrapper[4757]: E1006 14:00:36.597174 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd"
Oct 06 14:00:41 crc kubenswrapper[4757]: E1006 14:00:41.591087 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:41 crc kubenswrapper[4757]: E1006 14:00:41.592535 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:41 crc kubenswrapper[4757]: E1006 14:00:41.593541 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:41 crc kubenswrapper[4757]: E1006 14:00:41.593601 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server"
Oct 06 14:00:41 crc kubenswrapper[4757]: E1006 14:00:41.593561 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:41 crc kubenswrapper[4757]: E1006 14:00:41.595243 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:41 crc kubenswrapper[4757]: E1006 14:00:41.597415 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:41 crc kubenswrapper[4757]: E1006 14:00:41.597487 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd"
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.125615 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-587f5f887c-jf8v5"
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.298241 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-internal-tls-certs\") pod \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") "
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.298364 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-ovndb-tls-certs\") pod \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") "
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.298416 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sznj2\" (UniqueName: \"kubernetes.io/projected/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-kube-api-access-sznj2\") pod \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") "
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.298464 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-httpd-config\") pod \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") "
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.298503 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-combined-ca-bundle\") pod \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") "
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.298556 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-public-tls-certs\") pod \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") "
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.298612 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-config\") pod \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\" (UID: \"0aa7bf0a-b5c3-4db6-b401-e8512e1df933\") "
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.304985 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-kube-api-access-sznj2" (OuterVolumeSpecName: "kube-api-access-sznj2") pod "0aa7bf0a-b5c3-4db6-b401-e8512e1df933" (UID: "0aa7bf0a-b5c3-4db6-b401-e8512e1df933"). InnerVolumeSpecName "kube-api-access-sznj2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.306348 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "0aa7bf0a-b5c3-4db6-b401-e8512e1df933" (UID: "0aa7bf0a-b5c3-4db6-b401-e8512e1df933"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.347205 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0aa7bf0a-b5c3-4db6-b401-e8512e1df933" (UID: "0aa7bf0a-b5c3-4db6-b401-e8512e1df933"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.347612 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-config" (OuterVolumeSpecName: "config") pod "0aa7bf0a-b5c3-4db6-b401-e8512e1df933" (UID: "0aa7bf0a-b5c3-4db6-b401-e8512e1df933"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.353455 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0aa7bf0a-b5c3-4db6-b401-e8512e1df933" (UID: "0aa7bf0a-b5c3-4db6-b401-e8512e1df933"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.362703 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0aa7bf0a-b5c3-4db6-b401-e8512e1df933" (UID: "0aa7bf0a-b5c3-4db6-b401-e8512e1df933"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.363222 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "0aa7bf0a-b5c3-4db6-b401-e8512e1df933" (UID: "0aa7bf0a-b5c3-4db6-b401-e8512e1df933"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.400031 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-httpd-config\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.400064 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.400075 4757 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-public-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.400084 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-config\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.400107 4757 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.400115 4757 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.400123 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sznj2\" (UniqueName: \"kubernetes.io/projected/0aa7bf0a-b5c3-4db6-b401-e8512e1df933-kube-api-access-sznj2\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.439060 4757 generic.go:334] "Generic (PLEG): container finished" podID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerID="a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf" exitCode=0
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.439129 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-587f5f887c-jf8v5" event={"ID":"0aa7bf0a-b5c3-4db6-b401-e8512e1df933","Type":"ContainerDied","Data":"a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf"}
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.439170 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-587f5f887c-jf8v5" event={"ID":"0aa7bf0a-b5c3-4db6-b401-e8512e1df933","Type":"ContainerDied","Data":"959c4bee82d55f9943c6527e6f86aa380d2506368a6929e96749089ca5928814"}
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.439194 4757 scope.go:117] "RemoveContainer" containerID="bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598"
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.439206 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-587f5f887c-jf8v5"
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.465784 4757 scope.go:117] "RemoveContainer" containerID="a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf"
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.486189 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-587f5f887c-jf8v5"]
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.495965 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-587f5f887c-jf8v5"]
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.498161 4757 scope.go:117] "RemoveContainer" containerID="bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598"
Oct 06 14:00:43 crc kubenswrapper[4757]: E1006 14:00:43.499031 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598\": container with ID starting with bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598 not found: ID does not exist" containerID="bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598"
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.499083 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598"} err="failed to get container status \"bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598\": rpc error: code = NotFound desc = could not find container \"bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598\": container with ID starting with bcd917c1f2b6fb1969bcd04d032fd022e5edf74f68f97b2567d296a2e2e84598 not found: ID does not exist"
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.499131 4757 scope.go:117] "RemoveContainer" containerID="a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf"
Oct 06 14:00:43 crc kubenswrapper[4757]: E1006 14:00:43.499443 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf\": container with ID starting with a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf not found: ID does not exist" containerID="a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf"
Oct 06 14:00:43 crc kubenswrapper[4757]: I1006 14:00:43.499565 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf"} err="failed to get container status \"a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf\": rpc error: code = NotFound desc = could not find container \"a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf\": container with ID starting with a6b5253b08d5415774fa5b2cc0628bffd3d26d6c9de7019fd00ae41a8fba7edf not found: ID does not exist"
Oct 06 14:00:44 crc kubenswrapper[4757]: I1006 14:00:44.192969 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" path="/var/lib/kubelet/pods/0aa7bf0a-b5c3-4db6-b401-e8512e1df933/volumes"
Oct 06 14:00:46 crc kubenswrapper[4757]: E1006 14:00:46.594995 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:46 crc kubenswrapper[4757]: E1006 14:00:46.596051 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:46 crc kubenswrapper[4757]: E1006 14:00:46.596430 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:46 crc kubenswrapper[4757]: E1006 14:00:46.596775 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:46 crc kubenswrapper[4757]: E1006 14:00:46.596848 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server"
Oct 06 14:00:46 crc kubenswrapper[4757]: E1006 14:00:46.600434 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:46 crc kubenswrapper[4757]: E1006 14:00:46.602955 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:46 crc kubenswrapper[4757]: E1006 14:00:46.602998 4757 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.090829 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.243698 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-lock\") pod \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.243804 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g72g\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-kube-api-access-6g72g\") pod \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.243845 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") pod \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.243867 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.243895 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-cache\") pod \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\" (UID: \"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.244525 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-cache" (OuterVolumeSpecName: "cache") pod "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.244930 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-lock" (OuterVolumeSpecName: "lock") pod "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.249664 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-kube-api-access-6g72g" (OuterVolumeSpecName: "kube-api-access-6g72g") pod "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e"). InnerVolumeSpecName "kube-api-access-6g72g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.249941 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.250306 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "swift") pod "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" (UID: "cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.346937 4757 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-lock\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.347067 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g72g\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-kube-api-access-6g72g\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.347089 4757 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-etc-swift\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.347178 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.347203 4757 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e-cache\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.375847 4757 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.448671 4757 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.534837 4757 generic.go:334] "Generic (PLEG): container finished" podID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerID="a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b" exitCode=137
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.534920 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b"}
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.534952 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e","Type":"ContainerDied","Data":"363e1baa670fcb46278dc06e1f0ddf747b310ed231897f327adc82e7bb2771b2"}
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.534974 4757 scope.go:117] "RemoveContainer" containerID="a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.535047 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.538510 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fqwwx_3482c1c4-b15b-46cb-a897-3528fa22adda/ovs-vswitchd/0.log"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.539705 4757 generic.go:334] "Generic (PLEG): container finished" podID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" exitCode=137
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.539766 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fqwwx" event={"ID":"3482c1c4-b15b-46cb-a897-3528fa22adda","Type":"ContainerDied","Data":"ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4"}
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.560477 4757 scope.go:117] "RemoveContainer" containerID="fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.591036 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"]
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.591973 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4 is running failed: container process not found" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.592423 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.593311 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.593487 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4 is running failed: container process not found" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.593734 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.593797 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.593923 4757 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4 is running failed: container process not found" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.594045 4757 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-fqwwx" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.597080 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"]
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.602197 4757 scope.go:117] "RemoveContainer" containerID="b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.623317 4757 scope.go:117] "RemoveContainer" containerID="c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.643315 4757 scope.go:117] "RemoveContainer" containerID="425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.662164 4757 scope.go:117] "RemoveContainer" containerID="47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.673189 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fqwwx_3482c1c4-b15b-46cb-a897-3528fa22adda/ovs-vswitchd/0.log"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.674083 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-fqwwx"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.681148 4757 scope.go:117] "RemoveContainer" containerID="128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.752263 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3482c1c4-b15b-46cb-a897-3528fa22adda-scripts\") pod \"3482c1c4-b15b-46cb-a897-3528fa22adda\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.752786 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-run\") pod \"3482c1c4-b15b-46cb-a897-3528fa22adda\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.752887 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-etc-ovs\") pod \"3482c1c4-b15b-46cb-a897-3528fa22adda\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.752965 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-log\") pod \"3482c1c4-b15b-46cb-a897-3528fa22adda\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.753029 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-lib\") pod \"3482c1c4-b15b-46cb-a897-3528fa22adda\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.753156 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsx5h\" (UniqueName: \"kubernetes.io/projected/3482c1c4-b15b-46cb-a897-3528fa22adda-kube-api-access-wsx5h\") pod \"3482c1c4-b15b-46cb-a897-3528fa22adda\" (UID: \"3482c1c4-b15b-46cb-a897-3528fa22adda\") "
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.753784 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3482c1c4-b15b-46cb-a897-3528fa22adda-scripts" (OuterVolumeSpecName: "scripts") pod "3482c1c4-b15b-46cb-a897-3528fa22adda" (UID: "3482c1c4-b15b-46cb-a897-3528fa22adda"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.753856 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "3482c1c4-b15b-46cb-a897-3528fa22adda" (UID: "3482c1c4-b15b-46cb-a897-3528fa22adda"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.753884 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-run" (OuterVolumeSpecName: "var-run") pod "3482c1c4-b15b-46cb-a897-3528fa22adda" (UID: "3482c1c4-b15b-46cb-a897-3528fa22adda"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.753908 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-log" (OuterVolumeSpecName: "var-log") pod "3482c1c4-b15b-46cb-a897-3528fa22adda" (UID: "3482c1c4-b15b-46cb-a897-3528fa22adda"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.753929 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-lib" (OuterVolumeSpecName: "var-lib") pod "3482c1c4-b15b-46cb-a897-3528fa22adda" (UID: "3482c1c4-b15b-46cb-a897-3528fa22adda"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.761441 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3482c1c4-b15b-46cb-a897-3528fa22adda-kube-api-access-wsx5h" (OuterVolumeSpecName: "kube-api-access-wsx5h") pod "3482c1c4-b15b-46cb-a897-3528fa22adda" (UID: "3482c1c4-b15b-46cb-a897-3528fa22adda"). InnerVolumeSpecName "kube-api-access-wsx5h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.761488 4757 scope.go:117] "RemoveContainer" containerID="2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.798844 4757 scope.go:117] "RemoveContainer" containerID="aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.818901 4757 scope.go:117] "RemoveContainer" containerID="b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.834047 4757 scope.go:117] "RemoveContainer" containerID="4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.851325 4757 scope.go:117] "RemoveContainer" containerID="310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.854514 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3482c1c4-b15b-46cb-a897-3528fa22adda-scripts\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.854547 4757 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-run\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.854556 4757 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-etc-ovs\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.854565 4757 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-log\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.854574 4757 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/3482c1c4-b15b-46cb-a897-3528fa22adda-var-lib\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.854585 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsx5h\" (UniqueName: \"kubernetes.io/projected/3482c1c4-b15b-46cb-a897-3528fa22adda-kube-api-access-wsx5h\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.866903 4757 scope.go:117] "RemoveContainer" containerID="9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.890302 4757 scope.go:117] "RemoveContainer" containerID="586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.914816 4757 scope.go:117] "RemoveContainer" containerID="4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.933280 4757 scope.go:117] "RemoveContainer" containerID="a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.933792 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b\": container with ID starting with a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b not found: ID does not exist" containerID="a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.933873 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b"} err="failed to get container status \"a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b\": rpc error: code = NotFound desc = could not find container \"a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b\": container with ID starting with a8dbabfcf162761d8bcdf1b87762716bfadb00917af0e64430a06f6608c87e0b not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.933920 4757 scope.go:117] "RemoveContainer" containerID="fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.934811 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e\": container with ID starting with fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e not found: ID does not exist" containerID="fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.934864 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e"} err="failed to get container status \"fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e\": rpc error: code = NotFound desc = could not find container \"fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e\": container with ID starting with fdf14c9e9a23dfbae4c2900b46030a75c455df62b46a889978d4c18782fa297e not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.934900 4757 scope.go:117] "RemoveContainer" containerID="b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.935219 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e\": container with ID starting with b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e not found: ID does not exist" containerID="b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.935262 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e"} err="failed to get container status \"b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e\": rpc error: code = NotFound desc = could not find container \"b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e\": container with ID starting with b83693af5dd62fa1415425945aa0fdfbeb7c951800bfc45ba952ad8e5300972e not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.935290 4757 scope.go:117] "RemoveContainer" containerID="c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.935658 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4\": container with ID starting with c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4 not found: ID does not exist" containerID="c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.935756 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4"} err="failed to get container status \"c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4\": rpc error: code = NotFound desc = could not find container \"c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4\": container with ID starting with c590ad932f6bd2ffc90af02ba2b42481210297495161a7a724ec4887afc67fa4 not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.935838 4757 scope.go:117] "RemoveContainer" containerID="425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.936314 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1\": container with ID starting with 425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1 not found: ID does not exist" containerID="425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.936357 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1"} err="failed to get container status \"425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1\": rpc error: code = NotFound desc = could not find container \"425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1\": container with ID starting with 425e4cf04bb701acf0bd6544c54c034243bb49ca6b9336c4557b3370b950c0a1 not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.936388 4757 scope.go:117] "RemoveContainer" containerID="47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.936813 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636\": container with ID starting with 47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636 not found: ID does not exist" containerID="47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.936906 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636"} err="failed to get container status \"47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636\": rpc error: code = NotFound desc = could not find container \"47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636\": container with ID starting with 47cf60ed76e43b20f51da65306480b7f176e194d4f0852c1c215ef43e7f66636 not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.936995 4757 scope.go:117] "RemoveContainer" containerID="128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.937732 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4\": container with ID starting with 128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4 not found: ID does not exist" containerID="128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.937792 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4"} err="failed to get container status \"128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4\": rpc error: code = NotFound desc = could not find container \"128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4\": container with ID starting with 128957938cd8f8a94388394a5a1f34e6af0a9c8532220084d8c4cd91a84f64e4 not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.937826 4757 scope.go:117] "RemoveContainer" containerID="2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.938187 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2\": container with ID starting with 2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2 not found: ID does not exist" containerID="2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.938242 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2"} err="failed to get container status \"2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2\": rpc error: code = NotFound desc = could not find container \"2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2\": container with ID starting with 2ed56ae25da969c6f1c6035ee758bb3bdff3f5231db2cbaa897ed5f375a124d2 not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.938278 4757 scope.go:117] "RemoveContainer" containerID="aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.938575 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c\": container with ID starting with aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c not found: ID does not exist" containerID="aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.938622 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c"} err="failed to get container status \"aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c\": rpc error: code = NotFound desc = could not find container \"aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c\": container with ID starting with aa31fc47a53cc26c6c2aeebfb6be0d53ecc196f7288a242d80a2314eaefca60c not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.938651 4757 scope.go:117] "RemoveContainer" containerID="b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.939255 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1\": container with ID starting with b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1 not found: ID does not exist" containerID="b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.939296 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1"} err="failed to get container status \"b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1\": rpc error: code = NotFound desc = could not find container \"b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1\": container with ID starting with b197392cb5a2b7307583df8b0fbe8691aa093fba29ccd5a59dcfd082146a01a1 not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.939325 4757 scope.go:117] "RemoveContainer" containerID="4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.939597 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e\": container with ID starting with 4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e not found: ID does not exist" containerID="4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.939644 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e"} err="failed to get container status \"4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e\": rpc error: code = NotFound desc = could not find container \"4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e\": container with ID starting with 4a3e474b0906c602fa42d67ff9a1ce42690ad02c00ae4982e1e8e4c832a3b23e not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.939684 4757 scope.go:117] "RemoveContainer" containerID="310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.940071 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e\": container with ID starting with 310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e not found: ID does not exist" containerID="310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.940165 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e"} err="failed to get container status \"310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e\": rpc error: code = NotFound desc = could not find container \"310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e\": container with ID starting with 310e750d598b4155f3b7c06f9255cb7e0c3fd72dc7ef791abab66abcde43d44e not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.940202 4757 scope.go:117] "RemoveContainer" containerID="9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.940521 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92\": container with ID starting with 9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92 not found: ID does not exist" containerID="9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.940560 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92"} err="failed to get container status \"9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92\": rpc error: code = NotFound desc = could not find container \"9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92\": container with ID starting with 9e6653a1be3ad11eb600a3fb5b459d7efe07dd5b5f22cddfe18a97d785f0ab92 not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.940586 4757 scope.go:117] "RemoveContainer" containerID="586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.940850 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb\": container with ID starting with 586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb not found: ID does not exist" containerID="586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.940888 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb"} err="failed to get container status \"586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb\": rpc error: code = NotFound desc = could not find container \"586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb\": container with ID starting with 586142d874234725ea31d7fbbe5be2188dabc0eb32d3421991ab34ac2db87ceb not found: ID does not exist"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.940914 4757 scope.go:117] "RemoveContainer" containerID="4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a"
Oct 06 14:00:51 crc kubenswrapper[4757]: E1006 14:00:51.941254 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a\": container with ID starting with 4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a not found: ID does not exist" containerID="4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a"
Oct 06 14:00:51 crc kubenswrapper[4757]: I1006 14:00:51.941293 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a"} err="failed to get container status \"4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a\": rpc error: code = NotFound desc = could not find container \"4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a\": container with ID starting with 4e76a7ffee77c8b8fe521732d3733beb6c04f9473f44fd387b15b45fb090db2a not found: ID does not exist"
Oct 06 14:00:52 crc kubenswrapper[4757]: I1006 14:00:52.193150 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" path="/var/lib/kubelet/pods/cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e/volumes"
Oct 06 14:00:52 crc kubenswrapper[4757]: I1006 14:00:52.551182 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-fqwwx_3482c1c4-b15b-46cb-a897-3528fa22adda/ovs-vswitchd/0.log"
Oct 06 14:00:52 crc kubenswrapper[4757]: I1006 14:00:52.551877 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-fqwwx" event={"ID":"3482c1c4-b15b-46cb-a897-3528fa22adda","Type":"ContainerDied","Data":"9c7a96622871ab7dbf47b63299b8d069d77f24b16e932286a45618252e89ae36"}
Oct 06 14:00:52 crc kubenswrapper[4757]: I1006 14:00:52.551929 4757 scope.go:117] "RemoveContainer" containerID="ca5e23427b9e45a1476331fc4c8830e07091551016296db6fab1e5fc1115f6e4"
Oct 06 14:00:52 crc kubenswrapper[4757]: I1006 14:00:52.552032 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-fqwwx"
Oct 06 14:00:52 crc kubenswrapper[4757]: I1006 14:00:52.585574 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-fqwwx"]
Oct 06 14:00:52 crc kubenswrapper[4757]: I1006 14:00:52.590701 4757 scope.go:117] "RemoveContainer" containerID="21bc61d474e55f8578f65885857cf7de3c24140a6afe332ad8c23da23b86ee86"
Oct 06 14:00:52 crc kubenswrapper[4757]: I1006 14:00:52.590993 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-fqwwx"]
Oct 06 14:00:52 crc kubenswrapper[4757]: I1006 14:00:52.618479 4757 scope.go:117] "RemoveContainer" containerID="fec952e5b30b12fb3f6ba508c1eda6d8defe1d59bc1dac7aa9b44c741c60a711"
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.190205 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" path="/var/lib/kubelet/pods/3482c1c4-b15b-46cb-a897-3528fa22adda/volumes"
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.329253 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican39b0-account-delete-gptgt"
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.391509 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g7z5\" (UniqueName: \"kubernetes.io/projected/76f4e4cc-eccd-4c44-a39a-a75c06383d92-kube-api-access-4g7z5\") pod \"76f4e4cc-eccd-4c44-a39a-a75c06383d92\" (UID: \"76f4e4cc-eccd-4c44-a39a-a75c06383d92\") "
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.396344 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76f4e4cc-eccd-4c44-a39a-a75c06383d92-kube-api-access-4g7z5" (OuterVolumeSpecName: "kube-api-access-4g7z5") pod "76f4e4cc-eccd-4c44-a39a-a75c06383d92" (UID: "76f4e4cc-eccd-4c44-a39a-a75c06383d92"). InnerVolumeSpecName "kube-api-access-4g7z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.493391 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g7z5\" (UniqueName: \"kubernetes.io/projected/76f4e4cc-eccd-4c44-a39a-a75c06383d92-kube-api-access-4g7z5\") on node \"crc\" DevicePath \"\""
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.577651 4757 generic.go:334] "Generic (PLEG): container finished" podID="76f4e4cc-eccd-4c44-a39a-a75c06383d92" containerID="e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed" exitCode=137
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.577755 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican39b0-account-delete-gptgt" event={"ID":"76f4e4cc-eccd-4c44-a39a-a75c06383d92","Type":"ContainerDied","Data":"e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed"}
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.577966 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican39b0-account-delete-gptgt" event={"ID":"76f4e4cc-eccd-4c44-a39a-a75c06383d92","Type":"ContainerDied","Data":"7b5efd228eae4b6a01a28f5817ab8ab8af6f60b4f6db2dd3b93df8009bae97e1"}
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.577990 4757 scope.go:117] "RemoveContainer" containerID="e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed"
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.577770 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican39b0-account-delete-gptgt"
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.605378 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican39b0-account-delete-gptgt"]
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.607328 4757 scope.go:117] "RemoveContainer" containerID="e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed"
Oct 06 14:00:54 crc kubenswrapper[4757]: E1006 14:00:54.607761 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed\": container with ID starting with e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed not found: ID does not exist" containerID="e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed"
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.607807 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed"} err="failed to get container status \"e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed\": rpc error: code = NotFound desc = could not find container \"e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed\": container with ID starting with e439390db414be42f02cd6bd446ea6945ddb128a94c3debceb08e055dd6f1eed not found: ID does not exist"
Oct 06 14:00:54 crc kubenswrapper[4757]: I1006 14:00:54.610421 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican39b0-account-delete-gptgt"]
Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.191731 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76f4e4cc-eccd-4c44-a39a-a75c06383d92" path="/var/lib/kubelet/pods/76f4e4cc-eccd-4c44-a39a-a75c06383d92/volumes"
Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.476187 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell00cee-account-delete-2rcjp"
Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.599539 4757 generic.go:334] "Generic (PLEG): container finished" podID="7904f7d1-2332-4402-bd0b-4a40f5be43f9" containerID="df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd" exitCode=137
Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.599597 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell00cee-account-delete-2rcjp" Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.599596 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell00cee-account-delete-2rcjp" event={"ID":"7904f7d1-2332-4402-bd0b-4a40f5be43f9","Type":"ContainerDied","Data":"df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd"} Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.599849 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell00cee-account-delete-2rcjp" event={"ID":"7904f7d1-2332-4402-bd0b-4a40f5be43f9","Type":"ContainerDied","Data":"085e26901b7a5a97a82c0c1a166bdfc809aef264b9df75d2486547fd26e622ca"} Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.599911 4757 scope.go:117] "RemoveContainer" containerID="df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd" Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.624506 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djhht\" (UniqueName: \"kubernetes.io/projected/7904f7d1-2332-4402-bd0b-4a40f5be43f9-kube-api-access-djhht\") pod \"7904f7d1-2332-4402-bd0b-4a40f5be43f9\" (UID: \"7904f7d1-2332-4402-bd0b-4a40f5be43f9\") " Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.626665 4757 scope.go:117] "RemoveContainer" containerID="df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd" Oct 06 14:00:56 crc kubenswrapper[4757]: E1006 14:00:56.627332 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd\": container with ID starting with df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd not found: ID does not exist" containerID="df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd" Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.627373 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd"} err="failed to get container status \"df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd\": rpc error: code = NotFound desc = could not find container \"df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd\": container with ID starting with df8d26e861507a6ea9c35de1268390d49242824b4f8f3a8fc3d5c38f8370d3dd not found: ID does not exist" Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.631944 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7904f7d1-2332-4402-bd0b-4a40f5be43f9-kube-api-access-djhht" (OuterVolumeSpecName: "kube-api-access-djhht") pod "7904f7d1-2332-4402-bd0b-4a40f5be43f9" (UID: "7904f7d1-2332-4402-bd0b-4a40f5be43f9"). InnerVolumeSpecName "kube-api-access-djhht". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.730166 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djhht\" (UniqueName: \"kubernetes.io/projected/7904f7d1-2332-4402-bd0b-4a40f5be43f9-kube-api-access-djhht\") on node \"crc\" DevicePath \"\"" Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.937854 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell00cee-account-delete-2rcjp"] Oct 06 14:00:56 crc kubenswrapper[4757]: I1006 14:00:56.944927 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell00cee-account-delete-2rcjp"] Oct 06 14:00:58 crc kubenswrapper[4757]: I1006 14:00:58.196990 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7904f7d1-2332-4402-bd0b-4a40f5be43f9" path="/var/lib/kubelet/pods/7904f7d1-2332-4402-bd0b-4a40f5be43f9/volumes" Oct 06 14:01:04 crc kubenswrapper[4757]: I1006 14:01:04.361211 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:01:04 crc kubenswrapper[4757]: I1006 14:01:04.362059 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:01:04 crc kubenswrapper[4757]: I1006 14:01:04.362168 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:01:04 crc kubenswrapper[4757]: I1006 14:01:04.363319 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c34d9f422d1bc09e7e1520320e832d4b94b397917c882ecb52d4c57559a7b9dc"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:01:04 crc kubenswrapper[4757]: I1006 14:01:04.363448 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://c34d9f422d1bc09e7e1520320e832d4b94b397917c882ecb52d4c57559a7b9dc" gracePeriod=600 Oct 06 14:01:04 crc kubenswrapper[4757]: I1006 14:01:04.678083 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="c34d9f422d1bc09e7e1520320e832d4b94b397917c882ecb52d4c57559a7b9dc" exitCode=0 Oct 06 14:01:04 crc kubenswrapper[4757]: I1006 14:01:04.678141 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"c34d9f422d1bc09e7e1520320e832d4b94b397917c882ecb52d4c57559a7b9dc"} Oct 06 14:01:04 crc kubenswrapper[4757]: I1006 14:01:04.678545 4757 scope.go:117] "RemoveContainer" containerID="8ba4bb2fc370115674e0f99022b0181292af98f8a9ad6252b38df72c7f3b30ad" Oct 06 14:01:05 crc kubenswrapper[4757]: I1006 14:01:05.689143 4757 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4"} Oct 06 14:01:33 crc kubenswrapper[4757]: I1006 14:01:33.891933 4757 scope.go:117] "RemoveContainer" containerID="5df84abab1caae822870fe7a316c35bbbec727b85562976ead44f3e175906eda" Oct 06 14:01:33 crc kubenswrapper[4757]: I1006 14:01:33.932757 4757 scope.go:117] "RemoveContainer" containerID="054067921d37a8aee37d6965f7bafe252c9f68fef8f26289c2c2db6e8765c420" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.400909 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dhxj9"] Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401420 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-updater" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401449 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-updater" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401476 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-updater" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401487 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-updater" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401501 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401512 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-log" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401527 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerName="neutron-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401536 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerName="neutron-api" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401552 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="openstack-network-exporter" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401563 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="openstack-network-exporter" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401578 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-replicator" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401588 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-replicator" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401607 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19ac04ce-d95a-49ab-8eb2-eaf505990a53" containerName="nova-scheduler-scheduler" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401616 4757 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="19ac04ce-d95a-49ab-8eb2-eaf505990a53" containerName="nova-scheduler-scheduler" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401626 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-auditor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401633 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-auditor" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401647 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-reaper" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401655 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-reaper" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401669 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-metadata" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401677 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-metadata" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401691 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401702 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api-log" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401717 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="924963ee-1194-4d98-84d7-9bb3e426f7bc" containerName="nova-cell0-conductor-conductor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401724 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="924963ee-1194-4d98-84d7-9bb3e426f7bc" containerName="nova-cell0-conductor-conductor" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401735 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401743 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401759 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-server" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401767 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-server" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401781 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc51d15-69dd-4900-886c-29a4f372df24" containerName="placement-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401789 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc51d15-69dd-4900-886c-29a4f372df24" containerName="placement-api" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401802 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="sg-core" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 
14:01:34.401810 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="sg-core" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401820 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerName="barbican-worker" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401828 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerName="barbican-worker" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401842 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="ceilometer-notification-agent" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401850 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="ceilometer-notification-agent" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401861 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" containerName="setup-container" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401868 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" containerName="setup-container" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401881 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401889 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-api" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401903 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerName="glance-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401911 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerName="glance-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401921 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401928 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-log" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401939 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a21e8c5a-5819-4e8c-9b20-5353625fc36b" containerName="memcached" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401946 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a21e8c5a-5819-4e8c-9b20-5353625fc36b" containerName="memcached" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401960 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1489eff7-41ff-420a-bce0-14247f8554ee" containerName="galera" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.401968 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1489eff7-41ff-420a-bce0-14247f8554ee" containerName="galera" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401979 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerName="neutron-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 
14:01:34.401986 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerName="neutron-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.401996 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server-init" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402004 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server-init" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402012 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5b34617-b163-4b7a-9950-53f64a8cca2c" containerName="kube-state-metrics" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402022 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5b34617-b163-4b7a-9950-53f64a8cca2c" containerName="kube-state-metrics" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402033 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-auditor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402040 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-auditor" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402053 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-server" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402061 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-server" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402072 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80bdd62a-4024-4734-9ca0-a97f2bae29c3" containerName="keystone-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402081 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="80bdd62a-4024-4734-9ca0-a97f2bae29c3" containerName="keystone-api" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402119 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerName="glance-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402130 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerName="glance-log" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402146 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-replicator" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402153 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-replicator" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402165 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="ovn-northd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402174 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="ovn-northd" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402186 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="ceilometer-central-agent" Oct 06 14:01:34 crc 
kubenswrapper[4757]: I1006 14:01:34.402194 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="ceilometer-central-agent" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402206 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-replicator" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402213 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-replicator" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402221 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-auditor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402228 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-auditor" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402236 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7904f7d1-2332-4402-bd0b-4a40f5be43f9" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402244 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7904f7d1-2332-4402-bd0b-4a40f5be43f9" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402256 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71099fbe-349d-4a04-857c-41f270ec89af" containerName="nova-cell1-conductor-conductor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402266 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="71099fbe-349d-4a04-857c-41f270ec89af" containerName="nova-cell1-conductor-conductor" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402281 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-expirer" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402289 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-expirer" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402301 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="proxy-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402309 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="proxy-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402318 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="swift-recon-cron" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402325 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="swift-recon-cron" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402336 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dffbfc0a-c4ec-41cc-873b-552bc6b7fa69" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402343 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="dffbfc0a-c4ec-41cc-873b-552bc6b7fa69" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402355 4757 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-server" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402363 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-server" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402371 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" containerName="rabbitmq" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402379 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" containerName="rabbitmq" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402392 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerName="barbican-keystone-listener" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402400 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerName="barbican-keystone-listener" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402410 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24ed1753-25de-4bde-8158-52cb7dd6a2f1" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402417 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="24ed1753-25de-4bde-8158-52cb7dd6a2f1" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402427 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerName="glance-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402435 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerName="glance-log" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402448 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76f4e4cc-eccd-4c44-a39a-a75c06383d92" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402456 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="76f4e4cc-eccd-4c44-a39a-a75c06383d92" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402470 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" containerName="rabbitmq" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402476 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" containerName="rabbitmq" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402490 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="rsync" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402498 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="rsync" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402509 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402516 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402525 4757 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1489eff7-41ff-420a-bce0-14247f8554ee" containerName="mysql-bootstrap" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402532 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1489eff7-41ff-420a-bce0-14247f8554ee" containerName="mysql-bootstrap" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402554 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402561 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402570 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc51d15-69dd-4900-886c-29a4f372df24" containerName="placement-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402578 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc51d15-69dd-4900-886c-29a4f372df24" containerName="placement-log" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402586 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerName="barbican-worker-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402594 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerName="barbican-worker-log" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402602 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerName="barbican-keystone-listener-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402609 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerName="barbican-keystone-listener-log" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402619 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" containerName="setup-container" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402627 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" containerName="setup-container" Oct 06 14:01:34 crc kubenswrapper[4757]: E1006 14:01:34.402638 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerName="glance-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402645 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerName="glance-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402861 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerName="barbican-keystone-listener-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402878 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="ceilometer-notification-agent" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402897 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="19ac04ce-d95a-49ab-8eb2-eaf505990a53" containerName="nova-scheduler-scheduler" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402907 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="sg-core" Oct 06 14:01:34 crc 
kubenswrapper[4757]: I1006 14:01:34.402920 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-reaper" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402933 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402941 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="dffbfc0a-c4ec-41cc-873b-552bc6b7fa69" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402950 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerName="glance-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402960 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-metadata" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402974 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="924963ee-1194-4d98-84d7-9bb3e426f7bc" containerName="nova-cell0-conductor-conductor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402986 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="swift-recon-cron" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.402997 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="c84ddadb-263d-4a4a-bc3f-b645c449e392" containerName="nova-api-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403009 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-replicator" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403018 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="562413a3-660e-4ed9-92d6-23cb7d84b936" containerName="nova-metadata-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403030 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerName="neutron-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403045 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerName="glance-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403056 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="80bdd62a-4024-4734-9ca0-a97f2bae29c3" containerName="keystone-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403065 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-replicator" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403079 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-auditor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403114 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdc5a8a5-6cf7-4033-b2eb-ae43ef718a61" containerName="rabbitmq" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403133 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-expirer" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403152 4757 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="a21e8c5a-5819-4e8c-9b20-5353625fc36b" containerName="memcached" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403163 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="71099fbe-349d-4a04-857c-41f270ec89af" containerName="nova-cell1-conductor-conductor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403174 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc51d15-69dd-4900-886c-29a4f372df24" containerName="placement-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403185 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aa7bf0a-b5c3-4db6-b401-e8512e1df933" containerName="neutron-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403196 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="proxy-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403205 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="24ed1753-25de-4bde-8158-52cb7dd6a2f1" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403214 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-updater" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403222 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="542090f5-d2d8-4f78-b566-10e9885c341e" containerName="ceilometer-central-agent" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403232 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="openstack-network-exporter" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403242 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovsdb-server" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403251 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-server" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403261 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="76f4e4cc-eccd-4c44-a39a-a75c06383d92" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403270 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc01b313-87cb-44f6-9c85-84ae4931e1f6" containerName="ovn-northd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403284 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="object-server" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403296 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="4da5a9db-df84-4b71-b566-7c723fd7eb65" containerName="glance-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403308 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7904f7d1-2332-4402-bd0b-4a40f5be43f9" containerName="mariadb-account-delete" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403323 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="rsync" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403339 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" 
containerName="container-replicator" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403353 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fcb9b45-a0d6-4e89-ad16-d17a8d82e9f7" containerName="rabbitmq" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403367 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3482c1c4-b15b-46cb-a897-3528fa22adda" containerName="ovs-vswitchd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403381 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-auditor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403390 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5b34617-b163-4b7a-9950-53f64a8cca2c" containerName="kube-state-metrics" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403408 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc51d15-69dd-4900-886c-29a4f372df24" containerName="placement-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403425 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-updater" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403436 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerName="barbican-worker-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403449 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="1489eff7-41ff-420a-bce0-14247f8554ee" containerName="galera" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403459 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="account-server" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403469 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="78a4414b-7eec-457f-b08c-aeb719ffc320" containerName="glance-httpd" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403484 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3caac3-9cc6-43d2-a3e6-2e8c02cb9c0e" containerName="container-auditor" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403499 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e6333c1-01c0-42fd-a75f-31a2c57e9db2" containerName="barbican-worker" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403512 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api-log" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403528 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="985540de-3212-41f4-a3a6-180ff5c4eda2" containerName="barbican-keystone-listener" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.403539 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac42434c-8367-4cf2-9134-2d85444f90f4" containerName="barbican-api" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.404800 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.428070 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dhxj9"] Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.512039 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-catalog-content\") pod \"redhat-marketplace-dhxj9\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.512087 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-utilities\") pod \"redhat-marketplace-dhxj9\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.512247 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm6q9\" (UniqueName: \"kubernetes.io/projected/39149ad3-57c3-40d1-95b0-5e4961f4c01b-kube-api-access-jm6q9\") pod \"redhat-marketplace-dhxj9\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.613951 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-catalog-content\") pod \"redhat-marketplace-dhxj9\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.614005 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-utilities\") pod \"redhat-marketplace-dhxj9\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.614061 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm6q9\" (UniqueName: \"kubernetes.io/projected/39149ad3-57c3-40d1-95b0-5e4961f4c01b-kube-api-access-jm6q9\") pod \"redhat-marketplace-dhxj9\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.614914 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-catalog-content\") pod \"redhat-marketplace-dhxj9\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.614983 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-utilities\") pod \"redhat-marketplace-dhxj9\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.636081 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jm6q9\" (UniqueName: \"kubernetes.io/projected/39149ad3-57c3-40d1-95b0-5e4961f4c01b-kube-api-access-jm6q9\") pod \"redhat-marketplace-dhxj9\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:34 crc kubenswrapper[4757]: I1006 14:01:34.747348 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:35 crc kubenswrapper[4757]: I1006 14:01:35.212957 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dhxj9"] Oct 06 14:01:35 crc kubenswrapper[4757]: W1006 14:01:35.227394 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39149ad3_57c3_40d1_95b0_5e4961f4c01b.slice/crio-371bdb6963e7a9b7e79449a1426cd9eb61945e46d22ea11986738545036a2168 WatchSource:0}: Error finding container 371bdb6963e7a9b7e79449a1426cd9eb61945e46d22ea11986738545036a2168: Status 404 returned error can't find the container with id 371bdb6963e7a9b7e79449a1426cd9eb61945e46d22ea11986738545036a2168 Oct 06 14:01:36 crc kubenswrapper[4757]: I1006 14:01:36.015181 4757 generic.go:334] "Generic (PLEG): container finished" podID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerID="84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced" exitCode=0 Oct 06 14:01:36 crc kubenswrapper[4757]: I1006 14:01:36.015246 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dhxj9" event={"ID":"39149ad3-57c3-40d1-95b0-5e4961f4c01b","Type":"ContainerDied","Data":"84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced"} Oct 06 14:01:36 crc kubenswrapper[4757]: I1006 14:01:36.015285 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dhxj9" event={"ID":"39149ad3-57c3-40d1-95b0-5e4961f4c01b","Type":"ContainerStarted","Data":"371bdb6963e7a9b7e79449a1426cd9eb61945e46d22ea11986738545036a2168"} Oct 06 14:01:36 crc kubenswrapper[4757]: I1006 14:01:36.022686 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 14:01:37 crc kubenswrapper[4757]: I1006 14:01:37.026613 4757 generic.go:334] "Generic (PLEG): container finished" podID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerID="5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f" exitCode=0 Oct 06 14:01:37 crc kubenswrapper[4757]: I1006 14:01:37.026691 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dhxj9" event={"ID":"39149ad3-57c3-40d1-95b0-5e4961f4c01b","Type":"ContainerDied","Data":"5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f"} Oct 06 14:01:38 crc kubenswrapper[4757]: I1006 14:01:38.043818 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dhxj9" event={"ID":"39149ad3-57c3-40d1-95b0-5e4961f4c01b","Type":"ContainerStarted","Data":"44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb"} Oct 06 14:01:38 crc kubenswrapper[4757]: I1006 14:01:38.070735 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dhxj9" podStartSLOduration=2.616207729 podStartE2EDuration="4.070704546s" podCreationTimestamp="2025-10-06 14:01:34 +0000 UTC" firstStartedPulling="2025-10-06 14:01:36.019914193 +0000 UTC m=+1384.517232770" 
lastFinishedPulling="2025-10-06 14:01:37.47441105 +0000 UTC m=+1385.971729587" observedRunningTime="2025-10-06 14:01:38.067753282 +0000 UTC m=+1386.565071879" watchObservedRunningTime="2025-10-06 14:01:38.070704546 +0000 UTC m=+1386.568023123" Oct 06 14:01:44 crc kubenswrapper[4757]: I1006 14:01:44.748144 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:44 crc kubenswrapper[4757]: I1006 14:01:44.748537 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:44 crc kubenswrapper[4757]: I1006 14:01:44.831663 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:45 crc kubenswrapper[4757]: I1006 14:01:45.214311 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:45 crc kubenswrapper[4757]: I1006 14:01:45.275394 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dhxj9"] Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.152310 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dhxj9" podUID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerName="registry-server" containerID="cri-o://44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb" gracePeriod=2 Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.598150 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.723485 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-utilities\") pod \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.723565 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-catalog-content\") pod \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.723645 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm6q9\" (UniqueName: \"kubernetes.io/projected/39149ad3-57c3-40d1-95b0-5e4961f4c01b-kube-api-access-jm6q9\") pod \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\" (UID: \"39149ad3-57c3-40d1-95b0-5e4961f4c01b\") " Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.724883 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-utilities" (OuterVolumeSpecName: "utilities") pod "39149ad3-57c3-40d1-95b0-5e4961f4c01b" (UID: "39149ad3-57c3-40d1-95b0-5e4961f4c01b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.731395 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39149ad3-57c3-40d1-95b0-5e4961f4c01b-kube-api-access-jm6q9" (OuterVolumeSpecName: "kube-api-access-jm6q9") pod "39149ad3-57c3-40d1-95b0-5e4961f4c01b" (UID: "39149ad3-57c3-40d1-95b0-5e4961f4c01b"). InnerVolumeSpecName "kube-api-access-jm6q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.738242 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39149ad3-57c3-40d1-95b0-5e4961f4c01b" (UID: "39149ad3-57c3-40d1-95b0-5e4961f4c01b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.825518 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.825547 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39149ad3-57c3-40d1-95b0-5e4961f4c01b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:01:47 crc kubenswrapper[4757]: I1006 14:01:47.825558 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm6q9\" (UniqueName: \"kubernetes.io/projected/39149ad3-57c3-40d1-95b0-5e4961f4c01b-kube-api-access-jm6q9\") on node \"crc\" DevicePath \"\"" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.166859 4757 generic.go:334] "Generic (PLEG): container finished" podID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerID="44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb" exitCode=0 Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.166916 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dhxj9" event={"ID":"39149ad3-57c3-40d1-95b0-5e4961f4c01b","Type":"ContainerDied","Data":"44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb"} Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.166958 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dhxj9" event={"ID":"39149ad3-57c3-40d1-95b0-5e4961f4c01b","Type":"ContainerDied","Data":"371bdb6963e7a9b7e79449a1426cd9eb61945e46d22ea11986738545036a2168"} Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.166982 4757 scope.go:117] "RemoveContainer" containerID="44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.167011 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dhxj9" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.197419 4757 scope.go:117] "RemoveContainer" containerID="5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.224425 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dhxj9"] Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.235397 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dhxj9"] Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.239221 4757 scope.go:117] "RemoveContainer" containerID="84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.278781 4757 scope.go:117] "RemoveContainer" containerID="44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb" Oct 06 14:01:48 crc kubenswrapper[4757]: E1006 14:01:48.279367 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb\": container with ID starting with 44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb not found: ID does not exist" containerID="44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.279738 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb"} err="failed to get container status \"44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb\": rpc error: code = NotFound desc = could not find container \"44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb\": container with ID starting with 44be3d3160a1921ba4de49f7f1c26e5dbe01eb6ff152ed9e649687bcdd5072bb not found: ID does not exist" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.279774 4757 scope.go:117] "RemoveContainer" containerID="5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f" Oct 06 14:01:48 crc kubenswrapper[4757]: E1006 14:01:48.280401 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f\": container with ID starting with 5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f not found: ID does not exist" containerID="5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.280479 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f"} err="failed to get container status \"5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f\": rpc error: code = NotFound desc = could not find container \"5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f\": container with ID starting with 5572724cf0887d2e8a08f6fb4cb7273611e3a1a48e2a6b48924461b050135a3f not found: ID does not exist" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.280523 4757 scope.go:117] "RemoveContainer" containerID="84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced" Oct 06 14:01:48 crc kubenswrapper[4757]: E1006 14:01:48.281227 4757 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced\": container with ID starting with 84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced not found: ID does not exist" containerID="84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced" Oct 06 14:01:48 crc kubenswrapper[4757]: I1006 14:01:48.281276 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced"} err="failed to get container status \"84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced\": rpc error: code = NotFound desc = could not find container \"84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced\": container with ID starting with 84c869eab3b69f2d00e8716713553dfadaf272ffaaffbc73a9d8f61ef4ccbced not found: ID does not exist" Oct 06 14:01:50 crc kubenswrapper[4757]: I1006 14:01:50.200201 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" path="/var/lib/kubelet/pods/39149ad3-57c3-40d1-95b0-5e4961f4c01b/volumes" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.123757 4757 scope.go:117] "RemoveContainer" containerID="0fd05490d9790b9d51fe787c6d786ad598ea8b54f12c45e654e901227b207bd1" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.182981 4757 scope.go:117] "RemoveContainer" containerID="0a01bd55e84626495bc5fb0d7d2194d09355579e800eb0a54e05bb99ed8a7ca6" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.216668 4757 scope.go:117] "RemoveContainer" containerID="47b53b2403b9eba16bf691abd65e159f7ebc8cbf8525c716c07c59cbfd29b411" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.243499 4757 scope.go:117] "RemoveContainer" containerID="7ac68cc64c4f397d9ab7370c8469fd24d779a4e388ecba14d87d13a324cae7f5" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.276859 4757 scope.go:117] "RemoveContainer" containerID="ea10fa561dc98e7d6bbfee54e36f1a3d032db95d0fe11323ec54fed9a2ecc0c3" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.303143 4757 scope.go:117] "RemoveContainer" containerID="34a280e57324a0c49141921ad2801af530e40f7172ee7808ae2fc674330a1bf6" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.333251 4757 scope.go:117] "RemoveContainer" containerID="0d642b8e9315ccd4eb8cc69149282e3a4cc291685b4614e00eb8322c6b1d9bc6" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.366261 4757 scope.go:117] "RemoveContainer" containerID="a3d7e27eebbc4b60ce6095c1c6c4522a56191ad028a762e172e625e2020092be" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.393308 4757 scope.go:117] "RemoveContainer" containerID="275da12e97ad2918f8f513a63a2e9ec2e48a957a2487ef120a3e00be65430cf8" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.416462 4757 scope.go:117] "RemoveContainer" containerID="5b260f81b1e0965e466a896729e6f3b179fbab40f5ade6c74a4f99ade063546c" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.448255 4757 scope.go:117] "RemoveContainer" containerID="c0a49243055a7805559b9f7f3b0ff4636eef387948f20f859087ff9372d37c1b" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.471687 4757 scope.go:117] "RemoveContainer" containerID="c03ad77e6eab96b0f4db0d65b0073b2f4981616ff4113026446bdb1c1cfbe544" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.486402 4757 scope.go:117] "RemoveContainer" containerID="199ad9e8a53109edcd91ed95ddd4ba51048d9ad4e3b2ca26b98c845c9f4ddebc" Oct 06 14:02:34 crc 
kubenswrapper[4757]: I1006 14:02:34.500835 4757 scope.go:117] "RemoveContainer" containerID="0c00dffa5260f96cc6b67586125cd512b47e1b2350ec1a5a68e7c643721a29c7" Oct 06 14:02:34 crc kubenswrapper[4757]: I1006 14:02:34.514406 4757 scope.go:117] "RemoveContainer" containerID="a01221e8d3ac0dc2f1c87b5858c652449959b77ec1ba6714a10026c0882576cc" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.125204 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xzp94"] Oct 06 14:02:57 crc kubenswrapper[4757]: E1006 14:02:57.126516 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerName="extract-content" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.126536 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerName="extract-content" Oct 06 14:02:57 crc kubenswrapper[4757]: E1006 14:02:57.126551 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerName="extract-utilities" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.126559 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerName="extract-utilities" Oct 06 14:02:57 crc kubenswrapper[4757]: E1006 14:02:57.126588 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerName="registry-server" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.126596 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerName="registry-server" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.126780 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="39149ad3-57c3-40d1-95b0-5e4961f4c01b" containerName="registry-server" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.128284 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.137229 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xzp94"] Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.308009 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-utilities\") pod \"certified-operators-xzp94\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.308133 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-catalog-content\") pod \"certified-operators-xzp94\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.308156 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jkpb\" (UniqueName: \"kubernetes.io/projected/52651d32-a520-4f4f-8456-e55dbf3195da-kube-api-access-2jkpb\") pod \"certified-operators-xzp94\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.409781 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-utilities\") pod \"certified-operators-xzp94\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.410201 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-catalog-content\") pod \"certified-operators-xzp94\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.410232 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jkpb\" (UniqueName: \"kubernetes.io/projected/52651d32-a520-4f4f-8456-e55dbf3195da-kube-api-access-2jkpb\") pod \"certified-operators-xzp94\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.410516 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-utilities\") pod \"certified-operators-xzp94\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.410758 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-catalog-content\") pod \"certified-operators-xzp94\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.445907 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2jkpb\" (UniqueName: \"kubernetes.io/projected/52651d32-a520-4f4f-8456-e55dbf3195da-kube-api-access-2jkpb\") pod \"certified-operators-xzp94\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.453894 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:02:57 crc kubenswrapper[4757]: I1006 14:02:57.962891 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xzp94"] Oct 06 14:02:58 crc kubenswrapper[4757]: I1006 14:02:58.971399 4757 generic.go:334] "Generic (PLEG): container finished" podID="52651d32-a520-4f4f-8456-e55dbf3195da" containerID="ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807" exitCode=0 Oct 06 14:02:58 crc kubenswrapper[4757]: I1006 14:02:58.971502 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzp94" event={"ID":"52651d32-a520-4f4f-8456-e55dbf3195da","Type":"ContainerDied","Data":"ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807"} Oct 06 14:02:58 crc kubenswrapper[4757]: I1006 14:02:58.971756 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzp94" event={"ID":"52651d32-a520-4f4f-8456-e55dbf3195da","Type":"ContainerStarted","Data":"7651c7144c0dba45103bae3c2a25b59a082fe340dff4bacc7714e8199f883c4e"} Oct 06 14:03:01 crc kubenswrapper[4757]: I1006 14:03:01.004241 4757 generic.go:334] "Generic (PLEG): container finished" podID="52651d32-a520-4f4f-8456-e55dbf3195da" containerID="9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9" exitCode=0 Oct 06 14:03:01 crc kubenswrapper[4757]: I1006 14:03:01.004317 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzp94" event={"ID":"52651d32-a520-4f4f-8456-e55dbf3195da","Type":"ContainerDied","Data":"9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9"} Oct 06 14:03:02 crc kubenswrapper[4757]: I1006 14:03:02.017154 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzp94" event={"ID":"52651d32-a520-4f4f-8456-e55dbf3195da","Type":"ContainerStarted","Data":"fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9"} Oct 06 14:03:02 crc kubenswrapper[4757]: I1006 14:03:02.051568 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xzp94" podStartSLOduration=2.617057586 podStartE2EDuration="5.051539511s" podCreationTimestamp="2025-10-06 14:02:57 +0000 UTC" firstStartedPulling="2025-10-06 14:02:58.973252538 +0000 UTC m=+1467.470571115" lastFinishedPulling="2025-10-06 14:03:01.407734503 +0000 UTC m=+1469.905053040" observedRunningTime="2025-10-06 14:03:02.043800444 +0000 UTC m=+1470.541118981" watchObservedRunningTime="2025-10-06 14:03:02.051539511 +0000 UTC m=+1470.548858058" Oct 06 14:03:04 crc kubenswrapper[4757]: I1006 14:03:04.361597 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:03:04 crc kubenswrapper[4757]: I1006 14:03:04.361993 4757 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:03:07 crc kubenswrapper[4757]: I1006 14:03:07.454004 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:03:07 crc kubenswrapper[4757]: I1006 14:03:07.454607 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:03:07 crc kubenswrapper[4757]: I1006 14:03:07.514874 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:03:08 crc kubenswrapper[4757]: I1006 14:03:08.143484 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:03:08 crc kubenswrapper[4757]: I1006 14:03:08.206459 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xzp94"] Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.089412 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xzp94" podUID="52651d32-a520-4f4f-8456-e55dbf3195da" containerName="registry-server" containerID="cri-o://fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9" gracePeriod=2 Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.573532 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.735917 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-catalog-content\") pod \"52651d32-a520-4f4f-8456-e55dbf3195da\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.736020 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jkpb\" (UniqueName: \"kubernetes.io/projected/52651d32-a520-4f4f-8456-e55dbf3195da-kube-api-access-2jkpb\") pod \"52651d32-a520-4f4f-8456-e55dbf3195da\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.736072 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-utilities\") pod \"52651d32-a520-4f4f-8456-e55dbf3195da\" (UID: \"52651d32-a520-4f4f-8456-e55dbf3195da\") " Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.737742 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-utilities" (OuterVolumeSpecName: "utilities") pod "52651d32-a520-4f4f-8456-e55dbf3195da" (UID: "52651d32-a520-4f4f-8456-e55dbf3195da"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.745596 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52651d32-a520-4f4f-8456-e55dbf3195da-kube-api-access-2jkpb" (OuterVolumeSpecName: "kube-api-access-2jkpb") pod "52651d32-a520-4f4f-8456-e55dbf3195da" (UID: "52651d32-a520-4f4f-8456-e55dbf3195da"). InnerVolumeSpecName "kube-api-access-2jkpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.812903 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "52651d32-a520-4f4f-8456-e55dbf3195da" (UID: "52651d32-a520-4f4f-8456-e55dbf3195da"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.838440 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.838787 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jkpb\" (UniqueName: \"kubernetes.io/projected/52651d32-a520-4f4f-8456-e55dbf3195da-kube-api-access-2jkpb\") on node \"crc\" DevicePath \"\"" Oct 06 14:03:10 crc kubenswrapper[4757]: I1006 14:03:10.838838 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/52651d32-a520-4f4f-8456-e55dbf3195da-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.103932 4757 generic.go:334] "Generic (PLEG): container finished" podID="52651d32-a520-4f4f-8456-e55dbf3195da" containerID="fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9" exitCode=0 Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.103984 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzp94" event={"ID":"52651d32-a520-4f4f-8456-e55dbf3195da","Type":"ContainerDied","Data":"fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9"} Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.104010 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xzp94" event={"ID":"52651d32-a520-4f4f-8456-e55dbf3195da","Type":"ContainerDied","Data":"7651c7144c0dba45103bae3c2a25b59a082fe340dff4bacc7714e8199f883c4e"} Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.104026 4757 scope.go:117] "RemoveContainer" containerID="fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.104059 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xzp94" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.127169 4757 scope.go:117] "RemoveContainer" containerID="9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.143712 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xzp94"] Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.148192 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xzp94"] Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.155927 4757 scope.go:117] "RemoveContainer" containerID="ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.174030 4757 scope.go:117] "RemoveContainer" containerID="fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9" Oct 06 14:03:11 crc kubenswrapper[4757]: E1006 14:03:11.174425 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9\": container with ID starting with fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9 not found: ID does not exist" containerID="fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.174462 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9"} err="failed to get container status \"fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9\": rpc error: code = NotFound desc = could not find container \"fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9\": container with ID starting with fc306a7394caa6470fdd29a04d7d11b5a7fc82b48a965571b6296615f1d6d8e9 not found: ID does not exist" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.174489 4757 scope.go:117] "RemoveContainer" containerID="9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9" Oct 06 14:03:11 crc kubenswrapper[4757]: E1006 14:03:11.174815 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9\": container with ID starting with 9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9 not found: ID does not exist" containerID="9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.174835 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9"} err="failed to get container status \"9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9\": rpc error: code = NotFound desc = could not find container \"9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9\": container with ID starting with 9817051ff266f32f77be6a3b7089bf5ffc5e5f89bdb004e52b55f536553734c9 not found: ID does not exist" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.174849 4757 scope.go:117] "RemoveContainer" containerID="ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807" Oct 06 14:03:11 crc kubenswrapper[4757]: E1006 14:03:11.175068 4757 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807\": container with ID starting with ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807 not found: ID does not exist" containerID="ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807" Oct 06 14:03:11 crc kubenswrapper[4757]: I1006 14:03:11.175103 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807"} err="failed to get container status \"ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807\": rpc error: code = NotFound desc = could not find container \"ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807\": container with ID starting with ff0bbae2e3af8f8ee89cc8d9d19916d0c0320244a187d03e15d865c2de73e807 not found: ID does not exist" Oct 06 14:03:12 crc kubenswrapper[4757]: I1006 14:03:12.195152 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52651d32-a520-4f4f-8456-e55dbf3195da" path="/var/lib/kubelet/pods/52651d32-a520-4f4f-8456-e55dbf3195da/volumes" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.182008 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tdv9z"] Oct 06 14:03:13 crc kubenswrapper[4757]: E1006 14:03:13.183015 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52651d32-a520-4f4f-8456-e55dbf3195da" containerName="registry-server" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.183059 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="52651d32-a520-4f4f-8456-e55dbf3195da" containerName="registry-server" Oct 06 14:03:13 crc kubenswrapper[4757]: E1006 14:03:13.183085 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52651d32-a520-4f4f-8456-e55dbf3195da" containerName="extract-content" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.183129 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="52651d32-a520-4f4f-8456-e55dbf3195da" containerName="extract-content" Oct 06 14:03:13 crc kubenswrapper[4757]: E1006 14:03:13.183166 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52651d32-a520-4f4f-8456-e55dbf3195da" containerName="extract-utilities" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.183183 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="52651d32-a520-4f4f-8456-e55dbf3195da" containerName="extract-utilities" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.188465 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="52651d32-a520-4f4f-8456-e55dbf3195da" containerName="registry-server" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.190160 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.199681 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tdv9z"] Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.278147 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jtww\" (UniqueName: \"kubernetes.io/projected/9ced939c-c6bd-41ec-a3db-d2ece022f035-kube-api-access-4jtww\") pod \"community-operators-tdv9z\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.278447 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-catalog-content\") pod \"community-operators-tdv9z\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.278641 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-utilities\") pod \"community-operators-tdv9z\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.379905 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jtww\" (UniqueName: \"kubernetes.io/projected/9ced939c-c6bd-41ec-a3db-d2ece022f035-kube-api-access-4jtww\") pod \"community-operators-tdv9z\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.380007 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-catalog-content\") pod \"community-operators-tdv9z\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.380051 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-utilities\") pod \"community-operators-tdv9z\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.380787 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-catalog-content\") pod \"community-operators-tdv9z\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.380892 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-utilities\") pod \"community-operators-tdv9z\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.416932 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4jtww\" (UniqueName: \"kubernetes.io/projected/9ced939c-c6bd-41ec-a3db-d2ece022f035-kube-api-access-4jtww\") pod \"community-operators-tdv9z\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:13 crc kubenswrapper[4757]: I1006 14:03:13.534600 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:14 crc kubenswrapper[4757]: I1006 14:03:14.005578 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tdv9z"] Oct 06 14:03:14 crc kubenswrapper[4757]: I1006 14:03:14.159234 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdv9z" event={"ID":"9ced939c-c6bd-41ec-a3db-d2ece022f035","Type":"ContainerStarted","Data":"b2fe0fff72a1fe5723a418d10ec81175a1be0a8a46a0fd758289d1941545c5a7"} Oct 06 14:03:15 crc kubenswrapper[4757]: I1006 14:03:15.170969 4757 generic.go:334] "Generic (PLEG): container finished" podID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerID="60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c" exitCode=0 Oct 06 14:03:15 crc kubenswrapper[4757]: I1006 14:03:15.171308 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdv9z" event={"ID":"9ced939c-c6bd-41ec-a3db-d2ece022f035","Type":"ContainerDied","Data":"60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c"} Oct 06 14:03:16 crc kubenswrapper[4757]: I1006 14:03:16.182974 4757 generic.go:334] "Generic (PLEG): container finished" podID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerID="cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca" exitCode=0 Oct 06 14:03:16 crc kubenswrapper[4757]: I1006 14:03:16.189790 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdv9z" event={"ID":"9ced939c-c6bd-41ec-a3db-d2ece022f035","Type":"ContainerDied","Data":"cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca"} Oct 06 14:03:17 crc kubenswrapper[4757]: I1006 14:03:17.198881 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdv9z" event={"ID":"9ced939c-c6bd-41ec-a3db-d2ece022f035","Type":"ContainerStarted","Data":"e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1"} Oct 06 14:03:17 crc kubenswrapper[4757]: I1006 14:03:17.225448 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tdv9z" podStartSLOduration=2.823808333 podStartE2EDuration="4.225424487s" podCreationTimestamp="2025-10-06 14:03:13 +0000 UTC" firstStartedPulling="2025-10-06 14:03:15.173267261 +0000 UTC m=+1483.670585808" lastFinishedPulling="2025-10-06 14:03:16.574883435 +0000 UTC m=+1485.072201962" observedRunningTime="2025-10-06 14:03:17.221391869 +0000 UTC m=+1485.718710416" watchObservedRunningTime="2025-10-06 14:03:17.225424487 +0000 UTC m=+1485.722743044" Oct 06 14:03:23 crc kubenswrapper[4757]: I1006 14:03:23.535512 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:23 crc kubenswrapper[4757]: I1006 14:03:23.536972 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:23 crc kubenswrapper[4757]: I1006 14:03:23.610730 4757 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:24 crc kubenswrapper[4757]: I1006 14:03:24.334016 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:24 crc kubenswrapper[4757]: I1006 14:03:24.407665 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tdv9z"] Oct 06 14:03:26 crc kubenswrapper[4757]: I1006 14:03:26.278737 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tdv9z" podUID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerName="registry-server" containerID="cri-o://e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1" gracePeriod=2 Oct 06 14:03:26 crc kubenswrapper[4757]: I1006 14:03:26.796295 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:26 crc kubenswrapper[4757]: I1006 14:03:26.976137 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jtww\" (UniqueName: \"kubernetes.io/projected/9ced939c-c6bd-41ec-a3db-d2ece022f035-kube-api-access-4jtww\") pod \"9ced939c-c6bd-41ec-a3db-d2ece022f035\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " Oct 06 14:03:26 crc kubenswrapper[4757]: I1006 14:03:26.976183 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-catalog-content\") pod \"9ced939c-c6bd-41ec-a3db-d2ece022f035\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " Oct 06 14:03:26 crc kubenswrapper[4757]: I1006 14:03:26.976205 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-utilities\") pod \"9ced939c-c6bd-41ec-a3db-d2ece022f035\" (UID: \"9ced939c-c6bd-41ec-a3db-d2ece022f035\") " Oct 06 14:03:26 crc kubenswrapper[4757]: I1006 14:03:26.977448 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-utilities" (OuterVolumeSpecName: "utilities") pod "9ced939c-c6bd-41ec-a3db-d2ece022f035" (UID: "9ced939c-c6bd-41ec-a3db-d2ece022f035"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:03:26 crc kubenswrapper[4757]: I1006 14:03:26.981780 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ced939c-c6bd-41ec-a3db-d2ece022f035-kube-api-access-4jtww" (OuterVolumeSpecName: "kube-api-access-4jtww") pod "9ced939c-c6bd-41ec-a3db-d2ece022f035" (UID: "9ced939c-c6bd-41ec-a3db-d2ece022f035"). InnerVolumeSpecName "kube-api-access-4jtww". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.025673 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ced939c-c6bd-41ec-a3db-d2ece022f035" (UID: "9ced939c-c6bd-41ec-a3db-d2ece022f035"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.078765 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jtww\" (UniqueName: \"kubernetes.io/projected/9ced939c-c6bd-41ec-a3db-d2ece022f035-kube-api-access-4jtww\") on node \"crc\" DevicePath \"\"" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.079033 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.079144 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ced939c-c6bd-41ec-a3db-d2ece022f035-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.291003 4757 generic.go:334] "Generic (PLEG): container finished" podID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerID="e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1" exitCode=0 Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.291187 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tdv9z" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.291226 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdv9z" event={"ID":"9ced939c-c6bd-41ec-a3db-d2ece022f035","Type":"ContainerDied","Data":"e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1"} Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.296197 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tdv9z" event={"ID":"9ced939c-c6bd-41ec-a3db-d2ece022f035","Type":"ContainerDied","Data":"b2fe0fff72a1fe5723a418d10ec81175a1be0a8a46a0fd758289d1941545c5a7"} Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.296241 4757 scope.go:117] "RemoveContainer" containerID="e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.325977 4757 scope.go:117] "RemoveContainer" containerID="cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.340145 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tdv9z"] Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.343328 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tdv9z"] Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.411640 4757 scope.go:117] "RemoveContainer" containerID="60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.453332 4757 scope.go:117] "RemoveContainer" containerID="e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1" Oct 06 14:03:27 crc kubenswrapper[4757]: E1006 14:03:27.453799 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1\": container with ID starting with e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1 not found: ID does not exist" containerID="e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.453884 
4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1"} err="failed to get container status \"e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1\": rpc error: code = NotFound desc = could not find container \"e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1\": container with ID starting with e96b9adeb99f8364159dcd305d6f7866945738063d895ed7fc8f6a5e2e2397e1 not found: ID does not exist" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.453960 4757 scope.go:117] "RemoveContainer" containerID="cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca" Oct 06 14:03:27 crc kubenswrapper[4757]: E1006 14:03:27.454349 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca\": container with ID starting with cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca not found: ID does not exist" containerID="cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.454398 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca"} err="failed to get container status \"cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca\": rpc error: code = NotFound desc = could not find container \"cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca\": container with ID starting with cbfbad748a4e29139ef185b4f30c1c144d93c33f5bb2aabe63a849d86c35b7ca not found: ID does not exist" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.454432 4757 scope.go:117] "RemoveContainer" containerID="60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c" Oct 06 14:03:27 crc kubenswrapper[4757]: E1006 14:03:27.454812 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c\": container with ID starting with 60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c not found: ID does not exist" containerID="60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c" Oct 06 14:03:27 crc kubenswrapper[4757]: I1006 14:03:27.454867 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c"} err="failed to get container status \"60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c\": rpc error: code = NotFound desc = could not find container \"60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c\": container with ID starting with 60ad6eaa659a88a6e53c254d9aa101812c92c9557643e1af096aeb58df60ba0c not found: ID does not exist" Oct 06 14:03:28 crc kubenswrapper[4757]: I1006 14:03:28.193415 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ced939c-c6bd-41ec-a3db-d2ece022f035" path="/var/lib/kubelet/pods/9ced939c-c6bd-41ec-a3db-d2ece022f035/volumes" Oct 06 14:03:34 crc kubenswrapper[4757]: I1006 14:03:34.361345 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:03:34 crc kubenswrapper[4757]: I1006 14:03:34.361964 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:03:34 crc kubenswrapper[4757]: I1006 14:03:34.738519 4757 scope.go:117] "RemoveContainer" containerID="7f6ec78624b0739a9256eb6ee704e52cd0322c374b53cffbaf675ecadc0c2437" Oct 06 14:03:34 crc kubenswrapper[4757]: I1006 14:03:34.790181 4757 scope.go:117] "RemoveContainer" containerID="aa3678cfeaf79fb26c4c0f487ec077377631d000fb2a222d822e9b0d65cb8401" Oct 06 14:03:34 crc kubenswrapper[4757]: I1006 14:03:34.826031 4757 scope.go:117] "RemoveContainer" containerID="63d81f209fa475240764f493ecd86259a67b861ff2a98cd14b6c0cc2ae647646" Oct 06 14:03:34 crc kubenswrapper[4757]: I1006 14:03:34.861346 4757 scope.go:117] "RemoveContainer" containerID="59cd64048238008a1b88736345245c3cfac17e11de3199977a66e0af74a3cec4" Oct 06 14:03:34 crc kubenswrapper[4757]: I1006 14:03:34.901212 4757 scope.go:117] "RemoveContainer" containerID="fe40956a831a0da9019f8af107b4834c649a88bece271363e82d0b6d1b5ddd22" Oct 06 14:03:34 crc kubenswrapper[4757]: I1006 14:03:34.935721 4757 scope.go:117] "RemoveContainer" containerID="74d919e3817afeb51a780567751651163b720b8c4b693d9e053959c708eb06f2" Oct 06 14:03:34 crc kubenswrapper[4757]: I1006 14:03:34.957642 4757 scope.go:117] "RemoveContainer" containerID="11741bf6a070a7b95f60ae214af84e63160f149a2d7a859698b0e2b05ecdfd03" Oct 06 14:03:35 crc kubenswrapper[4757]: I1006 14:03:35.002432 4757 scope.go:117] "RemoveContainer" containerID="c64ab0d20fb2646123dfc9302dd89591647788afb3b670d1b19c581cda2b5162" Oct 06 14:03:35 crc kubenswrapper[4757]: I1006 14:03:35.040254 4757 scope.go:117] "RemoveContainer" containerID="e4fa44361d718502c0ccce9f909093402d4d992d6d6aa62c7fa2758b3a57bf2f" Oct 06 14:03:35 crc kubenswrapper[4757]: I1006 14:03:35.065891 4757 scope.go:117] "RemoveContainer" containerID="261f6b4aa7ccd2c620651f90574d988ef7b35bce2e0e2eb1b3acfa5a31d3ae4a" Oct 06 14:04:04 crc kubenswrapper[4757]: I1006 14:04:04.361600 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:04:04 crc kubenswrapper[4757]: I1006 14:04:04.362377 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:04:04 crc kubenswrapper[4757]: I1006 14:04:04.362452 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:04:04 crc kubenswrapper[4757]: I1006 14:04:04.364301 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:04:04 crc kubenswrapper[4757]: I1006 14:04:04.364412 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" gracePeriod=600 Oct 06 14:04:04 crc kubenswrapper[4757]: E1006 14:04:04.506396 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:04:04 crc kubenswrapper[4757]: I1006 14:04:04.618917 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" exitCode=0 Oct 06 14:04:04 crc kubenswrapper[4757]: I1006 14:04:04.618944 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4"} Oct 06 14:04:04 crc kubenswrapper[4757]: I1006 14:04:04.619013 4757 scope.go:117] "RemoveContainer" containerID="c34d9f422d1bc09e7e1520320e832d4b94b397917c882ecb52d4c57559a7b9dc" Oct 06 14:04:04 crc kubenswrapper[4757]: I1006 14:04:04.620131 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:04:04 crc kubenswrapper[4757]: E1006 14:04:04.623729 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:04:17 crc kubenswrapper[4757]: I1006 14:04:17.180070 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:04:17 crc kubenswrapper[4757]: E1006 14:04:17.180945 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:04:32 crc kubenswrapper[4757]: I1006 14:04:32.185698 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:04:32 crc kubenswrapper[4757]: E1006 14:04:32.186902 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:04:35 crc kubenswrapper[4757]: I1006 14:04:35.235878 4757 scope.go:117] "RemoveContainer" containerID="7be833793fbb9ececc1b726d6d92fe03be0ddc3a27f933c54e05fd8e35b9371a" Oct 06 14:04:35 crc kubenswrapper[4757]: I1006 14:04:35.258333 4757 scope.go:117] "RemoveContainer" containerID="3180ad49ec83de44356f248b7955019a2a0fc4b090f172d398ff980f512d38f8" Oct 06 14:04:35 crc kubenswrapper[4757]: I1006 14:04:35.297966 4757 scope.go:117] "RemoveContainer" containerID="2ce5bbfdb33f92bc2d7fc9048c4b7ff29928b7e522b0a4d950c6dfedcf2c2880" Oct 06 14:04:35 crc kubenswrapper[4757]: I1006 14:04:35.339188 4757 scope.go:117] "RemoveContainer" containerID="1f848e4aee396e52434f17956365c4c6aaf22ac1b6bad582ca2b6f0263241cd5" Oct 06 14:04:35 crc kubenswrapper[4757]: I1006 14:04:35.365629 4757 scope.go:117] "RemoveContainer" containerID="4b13bf86c84437f44865715abaf06cb2add1a3ab9683cc1eea5272ace7b108bd" Oct 06 14:04:35 crc kubenswrapper[4757]: I1006 14:04:35.418793 4757 scope.go:117] "RemoveContainer" containerID="14022a75e2f882a054a2a8082d11133582e6585d36ef89867d94fb837bf54886" Oct 06 14:04:35 crc kubenswrapper[4757]: I1006 14:04:35.438475 4757 scope.go:117] "RemoveContainer" containerID="ece99cfce39d7c0be1b08a62794d4aa985ed9e4c7a9eebc564749f2540ee8d63" Oct 06 14:04:35 crc kubenswrapper[4757]: I1006 14:04:35.462866 4757 scope.go:117] "RemoveContainer" containerID="ba1f45ef3932bf918c1b59fa1ed6629efa95b65ac36b643ce4bf6f517905a218" Oct 06 14:04:35 crc kubenswrapper[4757]: I1006 14:04:35.479465 4757 scope.go:117] "RemoveContainer" containerID="9e94ec7898a8d5cddd1308d4a24463b9f75f24738f45b41036651b8543dd2f7c" Oct 06 14:04:44 crc kubenswrapper[4757]: I1006 14:04:44.179966 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:04:44 crc kubenswrapper[4757]: E1006 14:04:44.181285 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:04:56 crc kubenswrapper[4757]: I1006 14:04:56.181165 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:04:56 crc kubenswrapper[4757]: E1006 14:04:56.182481 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:05:08 crc kubenswrapper[4757]: I1006 14:05:08.182054 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:05:08 crc kubenswrapper[4757]: E1006 14:05:08.183740 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:05:21 crc kubenswrapper[4757]: I1006 14:05:21.179799 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:05:21 crc kubenswrapper[4757]: E1006 14:05:21.181658 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:05:35 crc kubenswrapper[4757]: I1006 14:05:35.581683 4757 scope.go:117] "RemoveContainer" containerID="1e20cdc4c0ab7fbe34675da2bd3b70c163eadea2e1127276d5eac3f283996f8c" Oct 06 14:05:35 crc kubenswrapper[4757]: I1006 14:05:35.609576 4757 scope.go:117] "RemoveContainer" containerID="85d511bf0638c5d866a7350e35a676d505d59153aaeb1e1c9cb119aa200d5d73" Oct 06 14:05:35 crc kubenswrapper[4757]: I1006 14:05:35.657396 4757 scope.go:117] "RemoveContainer" containerID="ead89b6eb7ef2cdb8afe050d91ea530c7f417212cefe4bec8bab853834ee9629" Oct 06 14:05:35 crc kubenswrapper[4757]: I1006 14:05:35.695197 4757 scope.go:117] "RemoveContainer" containerID="f43c9069102422c3978d089df3df7fb465e8c5a8088261091ed94d97c7fca5a0" Oct 06 14:05:36 crc kubenswrapper[4757]: I1006 14:05:36.181347 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:05:36 crc kubenswrapper[4757]: E1006 14:05:36.181838 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:05:51 crc kubenswrapper[4757]: I1006 14:05:51.179647 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:05:51 crc kubenswrapper[4757]: E1006 14:05:51.180305 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:06:06 crc kubenswrapper[4757]: I1006 14:06:06.180223 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:06:06 crc kubenswrapper[4757]: E1006 14:06:06.181531 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:06:17 crc kubenswrapper[4757]: I1006 14:06:17.180368 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:06:17 crc kubenswrapper[4757]: E1006 14:06:17.181515 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:06:31 crc kubenswrapper[4757]: I1006 14:06:31.180873 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:06:31 crc kubenswrapper[4757]: E1006 14:06:31.182234 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:06:35 crc kubenswrapper[4757]: I1006 14:06:35.801030 4757 scope.go:117] "RemoveContainer" containerID="a56cbf1938af1879aefb87824c12e066c26f7d83d53119aca07be4518945693f" Oct 06 14:06:35 crc kubenswrapper[4757]: I1006 14:06:35.830585 4757 scope.go:117] "RemoveContainer" containerID="5d6cb1e28dd3b8e90a9d51e67db5c48ba545c42e1a58671c600c773ba7df9cc4" Oct 06 14:06:42 crc kubenswrapper[4757]: I1006 14:06:42.185225 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:06:42 crc kubenswrapper[4757]: E1006 14:06:42.185868 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:06:56 crc kubenswrapper[4757]: I1006 14:06:56.180068 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:06:56 crc kubenswrapper[4757]: E1006 14:06:56.181423 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:07:10 crc kubenswrapper[4757]: I1006 14:07:10.180286 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:07:10 crc kubenswrapper[4757]: E1006 14:07:10.182561 4757 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:07:25 crc kubenswrapper[4757]: I1006 14:07:25.179936 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:07:25 crc kubenswrapper[4757]: E1006 14:07:25.181015 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.568970 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cwdkh"] Oct 06 14:07:31 crc kubenswrapper[4757]: E1006 14:07:31.570004 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerName="registry-server" Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.570040 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerName="registry-server" Oct 06 14:07:31 crc kubenswrapper[4757]: E1006 14:07:31.570078 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerName="extract-utilities" Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.570139 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerName="extract-utilities" Oct 06 14:07:31 crc kubenswrapper[4757]: E1006 14:07:31.570183 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerName="extract-content" Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.570203 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerName="extract-content" Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.570660 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ced939c-c6bd-41ec-a3db-d2ece022f035" containerName="registry-server" Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.573216 4757 util.go:30] "No sandbox for pod can be found. 
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.582542 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cwdkh"]
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.664792 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-catalog-content\") pod \"redhat-operators-cwdkh\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.664895 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-utilities\") pod \"redhat-operators-cwdkh\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.665146 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgv5q\" (UniqueName: \"kubernetes.io/projected/56126440-030a-4d73-a36b-41ed7d44c23c-kube-api-access-vgv5q\") pod \"redhat-operators-cwdkh\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.766638 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-catalog-content\") pod \"redhat-operators-cwdkh\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.766722 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-utilities\") pod \"redhat-operators-cwdkh\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.766802 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgv5q\" (UniqueName: \"kubernetes.io/projected/56126440-030a-4d73-a36b-41ed7d44c23c-kube-api-access-vgv5q\") pod \"redhat-operators-cwdkh\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.767185 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-catalog-content\") pod \"redhat-operators-cwdkh\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.767263 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-utilities\") pod \"redhat-operators-cwdkh\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.792411 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgv5q\" (UniqueName: \"kubernetes.io/projected/56126440-030a-4d73-a36b-41ed7d44c23c-kube-api-access-vgv5q\") pod \"redhat-operators-cwdkh\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:31 crc kubenswrapper[4757]: I1006 14:07:31.903305 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:32 crc kubenswrapper[4757]: I1006 14:07:32.336309 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cwdkh"]
Oct 06 14:07:32 crc kubenswrapper[4757]: I1006 14:07:32.423966 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cwdkh" event={"ID":"56126440-030a-4d73-a36b-41ed7d44c23c","Type":"ContainerStarted","Data":"697cb2aea2c0cd79672bcc4c9dd5b1a4143e5b30461bfe05a7ee753ed608caae"}
Oct 06 14:07:33 crc kubenswrapper[4757]: I1006 14:07:33.433989 4757 generic.go:334] "Generic (PLEG): container finished" podID="56126440-030a-4d73-a36b-41ed7d44c23c" containerID="7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28" exitCode=0
Oct 06 14:07:33 crc kubenswrapper[4757]: I1006 14:07:33.434057 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cwdkh" event={"ID":"56126440-030a-4d73-a36b-41ed7d44c23c","Type":"ContainerDied","Data":"7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28"}
Oct 06 14:07:33 crc kubenswrapper[4757]: I1006 14:07:33.436499 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 06 14:07:36 crc kubenswrapper[4757]: I1006 14:07:36.179939 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4"
Oct 06 14:07:36 crc kubenswrapper[4757]: E1006 14:07:36.180581 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:07:39 crc kubenswrapper[4757]: I1006 14:07:39.481703 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cwdkh" event={"ID":"56126440-030a-4d73-a36b-41ed7d44c23c","Type":"ContainerStarted","Data":"687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207"}
Oct 06 14:07:40 crc kubenswrapper[4757]: I1006 14:07:40.502999 4757 generic.go:334] "Generic (PLEG): container finished" podID="56126440-030a-4d73-a36b-41ed7d44c23c" containerID="687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207" exitCode=0
Oct 06 14:07:40 crc kubenswrapper[4757]: I1006 14:07:40.503117 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cwdkh" event={"ID":"56126440-030a-4d73-a36b-41ed7d44c23c","Type":"ContainerDied","Data":"687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207"}
Oct 06 14:07:41 crc kubenswrapper[4757]: I1006 14:07:41.513594 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cwdkh" event={"ID":"56126440-030a-4d73-a36b-41ed7d44c23c","Type":"ContainerStarted","Data":"3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd"}
Oct 06 14:07:41 crc kubenswrapper[4757]: I1006 14:07:41.535711 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cwdkh" podStartSLOduration=3.024915104 podStartE2EDuration="10.535677793s" podCreationTimestamp="2025-10-06 14:07:31 +0000 UTC" firstStartedPulling="2025-10-06 14:07:33.436189469 +0000 UTC m=+1741.933508026" lastFinishedPulling="2025-10-06 14:07:40.946952178 +0000 UTC m=+1749.444270715" observedRunningTime="2025-10-06 14:07:41.531970394 +0000 UTC m=+1750.029288951" watchObservedRunningTime="2025-10-06 14:07:41.535677793 +0000 UTC m=+1750.032996370"
Oct 06 14:07:41 crc kubenswrapper[4757]: I1006 14:07:41.903833 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:41 crc kubenswrapper[4757]: I1006 14:07:41.903887 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:42 crc kubenswrapper[4757]: I1006 14:07:42.957227 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cwdkh" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" containerName="registry-server" probeResult="failure" output=<
Oct 06 14:07:42 crc kubenswrapper[4757]: timeout: failed to connect service ":50051" within 1s
Oct 06 14:07:42 crc kubenswrapper[4757]: >
Oct 06 14:07:50 crc kubenswrapper[4757]: I1006 14:07:50.181586 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4"
Oct 06 14:07:50 crc kubenswrapper[4757]: E1006 14:07:50.182370 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:07:51 crc kubenswrapper[4757]: I1006 14:07:51.977034 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.030285 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cwdkh"
Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.114813 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cwdkh"]
Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.217072 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rlx44"]
Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.217532 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rlx44" podUID="4251acbc-50b9-4729-8fac-0186245abb51" containerName="registry-server" containerID="cri-o://a9d0f3c746e4c8c9240dcc8f966415bbe73f11a705cfb36d92432fd68e42dc85" gracePeriod=2
Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.597062 4757 generic.go:334] "Generic (PLEG): container finished" podID="4251acbc-50b9-4729-8fac-0186245abb51" containerID="a9d0f3c746e4c8c9240dcc8f966415bbe73f11a705cfb36d92432fd68e42dc85" exitCode=0
containerID="a9d0f3c746e4c8c9240dcc8f966415bbe73f11a705cfb36d92432fd68e42dc85" exitCode=0 Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.597134 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlx44" event={"ID":"4251acbc-50b9-4729-8fac-0186245abb51","Type":"ContainerDied","Data":"a9d0f3c746e4c8c9240dcc8f966415bbe73f11a705cfb36d92432fd68e42dc85"} Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.597390 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlx44" event={"ID":"4251acbc-50b9-4729-8fac-0186245abb51","Type":"ContainerDied","Data":"7a4ed93af7fd934a3a0380638fdb5e1bd3396daa31794e646139b6cbf163e306"} Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.597405 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a4ed93af7fd934a3a0380638fdb5e1bd3396daa31794e646139b6cbf163e306" Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.622401 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.670557 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-catalog-content\") pod \"4251acbc-50b9-4729-8fac-0186245abb51\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.670644 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-utilities\") pod \"4251acbc-50b9-4729-8fac-0186245abb51\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.670759 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8g9b\" (UniqueName: \"kubernetes.io/projected/4251acbc-50b9-4729-8fac-0186245abb51-kube-api-access-s8g9b\") pod \"4251acbc-50b9-4729-8fac-0186245abb51\" (UID: \"4251acbc-50b9-4729-8fac-0186245abb51\") " Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.671216 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-utilities" (OuterVolumeSpecName: "utilities") pod "4251acbc-50b9-4729-8fac-0186245abb51" (UID: "4251acbc-50b9-4729-8fac-0186245abb51"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.694404 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4251acbc-50b9-4729-8fac-0186245abb51-kube-api-access-s8g9b" (OuterVolumeSpecName: "kube-api-access-s8g9b") pod "4251acbc-50b9-4729-8fac-0186245abb51" (UID: "4251acbc-50b9-4729-8fac-0186245abb51"). InnerVolumeSpecName "kube-api-access-s8g9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.755068 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4251acbc-50b9-4729-8fac-0186245abb51" (UID: "4251acbc-50b9-4729-8fac-0186245abb51"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.771860 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8g9b\" (UniqueName: \"kubernetes.io/projected/4251acbc-50b9-4729-8fac-0186245abb51-kube-api-access-s8g9b\") on node \"crc\" DevicePath \"\"" Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.771894 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:07:52 crc kubenswrapper[4757]: I1006 14:07:52.771906 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4251acbc-50b9-4729-8fac-0186245abb51-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:07:53 crc kubenswrapper[4757]: I1006 14:07:53.604057 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rlx44" Oct 06 14:07:53 crc kubenswrapper[4757]: I1006 14:07:53.634849 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rlx44"] Oct 06 14:07:53 crc kubenswrapper[4757]: I1006 14:07:53.645247 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rlx44"] Oct 06 14:07:54 crc kubenswrapper[4757]: I1006 14:07:54.189125 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4251acbc-50b9-4729-8fac-0186245abb51" path="/var/lib/kubelet/pods/4251acbc-50b9-4729-8fac-0186245abb51/volumes" Oct 06 14:08:02 crc kubenswrapper[4757]: I1006 14:08:02.184251 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:08:02 crc kubenswrapper[4757]: E1006 14:08:02.185083 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:08:15 crc kubenswrapper[4757]: I1006 14:08:15.179509 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:08:15 crc kubenswrapper[4757]: E1006 14:08:15.180637 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:08:28 crc kubenswrapper[4757]: I1006 14:08:28.181750 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:08:28 crc kubenswrapper[4757]: E1006 14:08:28.182770 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:08:35 crc kubenswrapper[4757]: I1006 14:08:35.897136 4757 scope.go:117] "RemoveContainer" containerID="a9d0f3c746e4c8c9240dcc8f966415bbe73f11a705cfb36d92432fd68e42dc85" Oct 06 14:08:35 crc kubenswrapper[4757]: I1006 14:08:35.935803 4757 scope.go:117] "RemoveContainer" containerID="abb56cdc9179190c6aa1b651ebe800e9b25f37e3914a3fe483900865f9904ee2" Oct 06 14:08:35 crc kubenswrapper[4757]: I1006 14:08:35.967956 4757 scope.go:117] "RemoveContainer" containerID="e8dfeafe9b988f9b21d1653abd90e66c96c15bf4bd22a4dc496a787fb5d8977e" Oct 06 14:08:43 crc kubenswrapper[4757]: I1006 14:08:43.179921 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:08:43 crc kubenswrapper[4757]: E1006 14:08:43.180588 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:08:54 crc kubenswrapper[4757]: I1006 14:08:54.179949 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:08:54 crc kubenswrapper[4757]: E1006 14:08:54.180659 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:09:09 crc kubenswrapper[4757]: I1006 14:09:09.180005 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:09:10 crc kubenswrapper[4757]: I1006 14:09:10.266914 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"7350ebc344678632d779b5daf23386ef70b7c699653aaea28f8e806f04c9879f"} Oct 06 14:11:34 crc kubenswrapper[4757]: I1006 14:11:34.360876 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:11:34 crc kubenswrapper[4757]: I1006 14:11:34.361695 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:12:04 crc kubenswrapper[4757]: I1006 14:12:04.361029 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:12:04 crc kubenswrapper[4757]: I1006 14:12:04.361664 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:12:34 crc kubenswrapper[4757]: I1006 14:12:34.361582 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:12:34 crc kubenswrapper[4757]: I1006 14:12:34.362223 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:12:34 crc kubenswrapper[4757]: I1006 14:12:34.362284 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:12:34 crc kubenswrapper[4757]: I1006 14:12:34.363933 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7350ebc344678632d779b5daf23386ef70b7c699653aaea28f8e806f04c9879f"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:12:34 crc kubenswrapper[4757]: I1006 14:12:34.364061 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://7350ebc344678632d779b5daf23386ef70b7c699653aaea28f8e806f04c9879f" gracePeriod=600 Oct 06 14:12:35 crc kubenswrapper[4757]: I1006 14:12:35.060416 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="7350ebc344678632d779b5daf23386ef70b7c699653aaea28f8e806f04c9879f" exitCode=0 Oct 06 14:12:35 crc kubenswrapper[4757]: I1006 14:12:35.060452 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"7350ebc344678632d779b5daf23386ef70b7c699653aaea28f8e806f04c9879f"} Oct 06 14:12:35 crc kubenswrapper[4757]: I1006 14:12:35.060808 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97"} Oct 06 14:12:35 crc kubenswrapper[4757]: I1006 14:12:35.060830 4757 scope.go:117] "RemoveContainer" containerID="9b8b911f587d3751346461ba61b46f96a76b6f00c23f408e5c12876bc7f9d8d4" Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.221253 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lm25b"] Oct 06 14:13:38 crc kubenswrapper[4757]: 
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.222003 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4251acbc-50b9-4729-8fac-0186245abb51" containerName="extract-content"
Oct 06 14:13:38 crc kubenswrapper[4757]: E1006 14:13:38.222018 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4251acbc-50b9-4729-8fac-0186245abb51" containerName="extract-utilities"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.222023 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4251acbc-50b9-4729-8fac-0186245abb51" containerName="extract-utilities"
Oct 06 14:13:38 crc kubenswrapper[4757]: E1006 14:13:38.222038 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4251acbc-50b9-4729-8fac-0186245abb51" containerName="registry-server"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.222044 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4251acbc-50b9-4729-8fac-0186245abb51" containerName="registry-server"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.222221 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="4251acbc-50b9-4729-8fac-0186245abb51" containerName="registry-server"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.223137 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.240790 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lm25b"]
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.322291 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5dvd\" (UniqueName: \"kubernetes.io/projected/8601701e-46e7-4e9e-b816-41ca2cb6feec-kube-api-access-z5dvd\") pod \"certified-operators-lm25b\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.322378 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-catalog-content\") pod \"certified-operators-lm25b\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.322484 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-utilities\") pod \"certified-operators-lm25b\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.423411 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-utilities\") pod \"certified-operators-lm25b\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.423757 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5dvd\" (UniqueName: \"kubernetes.io/projected/8601701e-46e7-4e9e-b816-41ca2cb6feec-kube-api-access-z5dvd\") pod \"certified-operators-lm25b\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.423904 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-catalog-content\") pod \"certified-operators-lm25b\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.424065 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-utilities\") pod \"certified-operators-lm25b\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.424321 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-catalog-content\") pod \"certified-operators-lm25b\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.444018 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5dvd\" (UniqueName: \"kubernetes.io/projected/8601701e-46e7-4e9e-b816-41ca2cb6feec-kube-api-access-z5dvd\") pod \"certified-operators-lm25b\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:38 crc kubenswrapper[4757]: I1006 14:13:38.545057 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lm25b"
Oct 06 14:13:39 crc kubenswrapper[4757]: I1006 14:13:39.000787 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lm25b"]
Oct 06 14:13:39 crc kubenswrapper[4757]: I1006 14:13:39.609771 4757 generic.go:334] "Generic (PLEG): container finished" podID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerID="2289b1542d2b528999705924c1732d5b62e7a83413719728e53efdec3d57968e" exitCode=0
Oct 06 14:13:39 crc kubenswrapper[4757]: I1006 14:13:39.609877 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lm25b" event={"ID":"8601701e-46e7-4e9e-b816-41ca2cb6feec","Type":"ContainerDied","Data":"2289b1542d2b528999705924c1732d5b62e7a83413719728e53efdec3d57968e"}
Oct 06 14:13:39 crc kubenswrapper[4757]: I1006 14:13:39.610191 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lm25b" event={"ID":"8601701e-46e7-4e9e-b816-41ca2cb6feec","Type":"ContainerStarted","Data":"2a9b8c033a1704a6c05f6bbcf11e6fdf4a894ca9c997615d06dfbda9beffa313"}
Oct 06 14:13:39 crc kubenswrapper[4757]: I1006 14:13:39.612057 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 06 14:13:40 crc kubenswrapper[4757]: I1006 14:13:40.619713 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lm25b" event={"ID":"8601701e-46e7-4e9e-b816-41ca2cb6feec","Type":"ContainerStarted","Data":"438421e7b45ef80e507d0ce2d7cb044de2163d628322b9d3c5df72d16a89e7c2"}
Oct 06 14:13:41 crc kubenswrapper[4757]: I1006 14:13:41.629417 4757 generic.go:334] "Generic (PLEG): container finished" podID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerID="438421e7b45ef80e507d0ce2d7cb044de2163d628322b9d3c5df72d16a89e7c2" exitCode=0
Oct 06 14:13:41 crc kubenswrapper[4757]: I1006 14:13:41.629490 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lm25b" event={"ID":"8601701e-46e7-4e9e-b816-41ca2cb6feec","Type":"ContainerDied","Data":"438421e7b45ef80e507d0ce2d7cb044de2163d628322b9d3c5df72d16a89e7c2"}
Oct 06 14:13:42 crc kubenswrapper[4757]: I1006 14:13:42.641867 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lm25b" event={"ID":"8601701e-46e7-4e9e-b816-41ca2cb6feec","Type":"ContainerStarted","Data":"d844cd348f2e374e623b9df3f468fcec1aead0c3ee61e31719aef63ad7995966"}
Oct 06 14:13:42 crc kubenswrapper[4757]: I1006 14:13:42.665745 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lm25b" podStartSLOduration=2.179497002 podStartE2EDuration="4.665715339s" podCreationTimestamp="2025-10-06 14:13:38 +0000 UTC" firstStartedPulling="2025-10-06 14:13:39.611804724 +0000 UTC m=+2108.109123261" lastFinishedPulling="2025-10-06 14:13:42.098023041 +0000 UTC m=+2110.595341598" observedRunningTime="2025-10-06 14:13:42.656470996 +0000 UTC m=+2111.153789543" watchObservedRunningTime="2025-10-06 14:13:42.665715339 +0000 UTC m=+2111.163033926"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.591307 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l7rjh"]
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.595673 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l7rjh"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.604479 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l7rjh"]
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.749949 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-utilities\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.750343 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-catalog-content\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.750452 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqrkq\" (UniqueName: \"kubernetes.io/projected/e67fe126-23b7-4891-bd7f-e14842cf1276-kube-api-access-hqrkq\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.851603 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqrkq\" (UniqueName: \"kubernetes.io/projected/e67fe126-23b7-4891-bd7f-e14842cf1276-kube-api-access-hqrkq\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.851710 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-utilities\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.851764 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-catalog-content\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.852415 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-catalog-content\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.852509 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-utilities\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh"
Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.880946 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqrkq\" (UniqueName: \"kubernetes.io/projected/e67fe126-23b7-4891-bd7f-e14842cf1276-kube-api-access-hqrkq\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh"
"MountVolume.SetUp succeeded for volume \"kube-api-access-hqrkq\" (UniqueName: \"kubernetes.io/projected/e67fe126-23b7-4891-bd7f-e14842cf1276-kube-api-access-hqrkq\") pod \"community-operators-l7rjh\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " pod="openshift-marketplace/community-operators-l7rjh" Oct 06 14:13:45 crc kubenswrapper[4757]: I1006 14:13:45.928577 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l7rjh" Oct 06 14:13:46 crc kubenswrapper[4757]: I1006 14:13:46.255487 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l7rjh"] Oct 06 14:13:46 crc kubenswrapper[4757]: I1006 14:13:46.672008 4757 generic.go:334] "Generic (PLEG): container finished" podID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerID="0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5" exitCode=0 Oct 06 14:13:46 crc kubenswrapper[4757]: I1006 14:13:46.672073 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7rjh" event={"ID":"e67fe126-23b7-4891-bd7f-e14842cf1276","Type":"ContainerDied","Data":"0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5"} Oct 06 14:13:46 crc kubenswrapper[4757]: I1006 14:13:46.672132 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7rjh" event={"ID":"e67fe126-23b7-4891-bd7f-e14842cf1276","Type":"ContainerStarted","Data":"be9a4061992ce08d00a312ba144dee7f1de97c3df28d70a5e767518e9b0004f8"} Oct 06 14:13:47 crc kubenswrapper[4757]: I1006 14:13:47.682787 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7rjh" event={"ID":"e67fe126-23b7-4891-bd7f-e14842cf1276","Type":"ContainerStarted","Data":"cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1"} Oct 06 14:13:48 crc kubenswrapper[4757]: I1006 14:13:48.545757 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lm25b" Oct 06 14:13:48 crc kubenswrapper[4757]: I1006 14:13:48.545840 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lm25b" Oct 06 14:13:48 crc kubenswrapper[4757]: I1006 14:13:48.586445 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lm25b" Oct 06 14:13:48 crc kubenswrapper[4757]: I1006 14:13:48.693705 4757 generic.go:334] "Generic (PLEG): container finished" podID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerID="cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1" exitCode=0 Oct 06 14:13:48 crc kubenswrapper[4757]: I1006 14:13:48.695232 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7rjh" event={"ID":"e67fe126-23b7-4891-bd7f-e14842cf1276","Type":"ContainerDied","Data":"cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1"} Oct 06 14:13:48 crc kubenswrapper[4757]: I1006 14:13:48.744444 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lm25b" Oct 06 14:13:49 crc kubenswrapper[4757]: I1006 14:13:49.710244 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7rjh" 
event={"ID":"e67fe126-23b7-4891-bd7f-e14842cf1276","Type":"ContainerStarted","Data":"c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec"} Oct 06 14:13:49 crc kubenswrapper[4757]: I1006 14:13:49.744664 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l7rjh" podStartSLOduration=2.235983501 podStartE2EDuration="4.744641628s" podCreationTimestamp="2025-10-06 14:13:45 +0000 UTC" firstStartedPulling="2025-10-06 14:13:46.673796727 +0000 UTC m=+2115.171115264" lastFinishedPulling="2025-10-06 14:13:49.182454854 +0000 UTC m=+2117.679773391" observedRunningTime="2025-10-06 14:13:49.739680201 +0000 UTC m=+2118.236998738" watchObservedRunningTime="2025-10-06 14:13:49.744641628 +0000 UTC m=+2118.241960165" Oct 06 14:13:50 crc kubenswrapper[4757]: I1006 14:13:50.984077 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lm25b"] Oct 06 14:13:50 crc kubenswrapper[4757]: I1006 14:13:50.985430 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lm25b" podUID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerName="registry-server" containerID="cri-o://d844cd348f2e374e623b9df3f468fcec1aead0c3ee61e31719aef63ad7995966" gracePeriod=2 Oct 06 14:13:51 crc kubenswrapper[4757]: I1006 14:13:51.735971 4757 generic.go:334] "Generic (PLEG): container finished" podID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerID="d844cd348f2e374e623b9df3f468fcec1aead0c3ee61e31719aef63ad7995966" exitCode=0 Oct 06 14:13:51 crc kubenswrapper[4757]: I1006 14:13:51.736039 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lm25b" event={"ID":"8601701e-46e7-4e9e-b816-41ca2cb6feec","Type":"ContainerDied","Data":"d844cd348f2e374e623b9df3f468fcec1aead0c3ee61e31719aef63ad7995966"} Oct 06 14:13:51 crc kubenswrapper[4757]: I1006 14:13:51.924278 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lm25b" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.064436 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5dvd\" (UniqueName: \"kubernetes.io/projected/8601701e-46e7-4e9e-b816-41ca2cb6feec-kube-api-access-z5dvd\") pod \"8601701e-46e7-4e9e-b816-41ca2cb6feec\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.064490 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-utilities\") pod \"8601701e-46e7-4e9e-b816-41ca2cb6feec\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.064614 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-catalog-content\") pod \"8601701e-46e7-4e9e-b816-41ca2cb6feec\" (UID: \"8601701e-46e7-4e9e-b816-41ca2cb6feec\") " Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.065720 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-utilities" (OuterVolumeSpecName: "utilities") pod "8601701e-46e7-4e9e-b816-41ca2cb6feec" (UID: "8601701e-46e7-4e9e-b816-41ca2cb6feec"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.082478 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8601701e-46e7-4e9e-b816-41ca2cb6feec-kube-api-access-z5dvd" (OuterVolumeSpecName: "kube-api-access-z5dvd") pod "8601701e-46e7-4e9e-b816-41ca2cb6feec" (UID: "8601701e-46e7-4e9e-b816-41ca2cb6feec"). InnerVolumeSpecName "kube-api-access-z5dvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.118612 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8601701e-46e7-4e9e-b816-41ca2cb6feec" (UID: "8601701e-46e7-4e9e-b816-41ca2cb6feec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.165688 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.165732 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5dvd\" (UniqueName: \"kubernetes.io/projected/8601701e-46e7-4e9e-b816-41ca2cb6feec-kube-api-access-z5dvd\") on node \"crc\" DevicePath \"\"" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.165749 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8601701e-46e7-4e9e-b816-41ca2cb6feec-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.747491 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lm25b" event={"ID":"8601701e-46e7-4e9e-b816-41ca2cb6feec","Type":"ContainerDied","Data":"2a9b8c033a1704a6c05f6bbcf11e6fdf4a894ca9c997615d06dfbda9beffa313"} Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.747798 4757 scope.go:117] "RemoveContainer" containerID="d844cd348f2e374e623b9df3f468fcec1aead0c3ee61e31719aef63ad7995966" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.747588 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lm25b" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.778832 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lm25b"] Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.779675 4757 scope.go:117] "RemoveContainer" containerID="438421e7b45ef80e507d0ce2d7cb044de2163d628322b9d3c5df72d16a89e7c2" Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.785682 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lm25b"] Oct 06 14:13:52 crc kubenswrapper[4757]: I1006 14:13:52.801821 4757 scope.go:117] "RemoveContainer" containerID="2289b1542d2b528999705924c1732d5b62e7a83413719728e53efdec3d57968e" Oct 06 14:13:54 crc kubenswrapper[4757]: I1006 14:13:54.197194 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8601701e-46e7-4e9e-b816-41ca2cb6feec" path="/var/lib/kubelet/pods/8601701e-46e7-4e9e-b816-41ca2cb6feec/volumes" Oct 06 14:13:55 crc kubenswrapper[4757]: I1006 14:13:55.928769 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l7rjh" Oct 06 14:13:55 crc kubenswrapper[4757]: I1006 14:13:55.929275 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l7rjh" Oct 06 14:13:56 crc kubenswrapper[4757]: I1006 14:13:56.001121 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l7rjh" Oct 06 14:13:56 crc kubenswrapper[4757]: I1006 14:13:56.861901 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l7rjh" Oct 06 14:13:56 crc kubenswrapper[4757]: I1006 14:13:56.976241 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l7rjh"] Oct 06 14:13:58 crc kubenswrapper[4757]: I1006 14:13:58.799597 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l7rjh" podUID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerName="registry-server" containerID="cri-o://c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec" gracePeriod=2 Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.208654 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l7rjh" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.381655 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqrkq\" (UniqueName: \"kubernetes.io/projected/e67fe126-23b7-4891-bd7f-e14842cf1276-kube-api-access-hqrkq\") pod \"e67fe126-23b7-4891-bd7f-e14842cf1276\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.381768 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-utilities\") pod \"e67fe126-23b7-4891-bd7f-e14842cf1276\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.381858 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-catalog-content\") pod \"e67fe126-23b7-4891-bd7f-e14842cf1276\" (UID: \"e67fe126-23b7-4891-bd7f-e14842cf1276\") " Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.383157 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-utilities" (OuterVolumeSpecName: "utilities") pod "e67fe126-23b7-4891-bd7f-e14842cf1276" (UID: "e67fe126-23b7-4891-bd7f-e14842cf1276"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.391705 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e67fe126-23b7-4891-bd7f-e14842cf1276-kube-api-access-hqrkq" (OuterVolumeSpecName: "kube-api-access-hqrkq") pod "e67fe126-23b7-4891-bd7f-e14842cf1276" (UID: "e67fe126-23b7-4891-bd7f-e14842cf1276"). InnerVolumeSpecName "kube-api-access-hqrkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.433400 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e67fe126-23b7-4891-bd7f-e14842cf1276" (UID: "e67fe126-23b7-4891-bd7f-e14842cf1276"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
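
The UnmountVolume.TearDown entries above show the reconciler's usual cleanup order for a deleted catalog pod: the emptyDir volumes ("utilities", "catalog-content") and the projected service-account token are torn down per pod UID, after which the "Volume detached" records below confirm the reconciler's state is clear. A minimal Go sketch for auditing this from a saved copy of the journal (the kubelet.log file name and the regexp are illustrative assumptions, not kubelet code):

```go
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches the operation_generator.go lines seen in this log, e.g.:
//   UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/<uid>-utilities"
//   (OuterVolumeSpecName: "utilities") pod "<uid>" ...
var tearDownRE = regexp.MustCompile(
	`UnmountVolume\.TearDown succeeded for volume "([^"]+)" \(OuterVolumeSpecName: "([^"]+)"\) pod "([^"]+)"`)

func main() {
	f, err := os.Open("kubelet.log") // assumes the journal excerpt was saved locally
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	perPod := map[string][]string{} // pod UID -> volume spec names, in log order
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // journal lines can be very long
	for sc.Scan() {
		if m := tearDownRE.FindStringSubmatch(sc.Text()); m != nil {
			perPod[m[3]] = append(perPod[m[3]], m[2]) // OuterVolumeSpecName
		}
	}
	for uid, vols := range perPod {
		fmt.Printf("pod %s: tore down %d volume(s): %v\n", uid, len(vols), vols)
	}
}
```

Run against this excerpt it would report, for example, pod e67fe126-23b7-4891-bd7f-e14842cf1276 tearing down utilities, kube-api-access-hqrkq, and catalog-content, matching the three entries above.
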
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.483060 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.483108 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e67fe126-23b7-4891-bd7f-e14842cf1276-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.483121 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqrkq\" (UniqueName: \"kubernetes.io/projected/e67fe126-23b7-4891-bd7f-e14842cf1276-kube-api-access-hqrkq\") on node \"crc\" DevicePath \"\"" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.817915 4757 generic.go:334] "Generic (PLEG): container finished" podID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerID="c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec" exitCode=0 Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.817987 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7rjh" event={"ID":"e67fe126-23b7-4891-bd7f-e14842cf1276","Type":"ContainerDied","Data":"c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec"} Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.818074 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l7rjh" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.818474 4757 scope.go:117] "RemoveContainer" containerID="c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.818451 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7rjh" event={"ID":"e67fe126-23b7-4891-bd7f-e14842cf1276","Type":"ContainerDied","Data":"be9a4061992ce08d00a312ba144dee7f1de97c3df28d70a5e767518e9b0004f8"} Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.851649 4757 scope.go:117] "RemoveContainer" containerID="cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.873652 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l7rjh"] Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.880774 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l7rjh"] Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.885607 4757 scope.go:117] "RemoveContainer" containerID="0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.942518 4757 scope.go:117] "RemoveContainer" containerID="c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec" Oct 06 14:13:59 crc kubenswrapper[4757]: E1006 14:13:59.942977 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec\": container with ID starting with c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec not found: ID does not exist" containerID="c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.943016 
4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec"} err="failed to get container status \"c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec\": rpc error: code = NotFound desc = could not find container \"c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec\": container with ID starting with c2b2a0587877aff0d38f3bb8537806a3d1545255ca63bd72458625ae0c1ba3ec not found: ID does not exist" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.943043 4757 scope.go:117] "RemoveContainer" containerID="cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1" Oct 06 14:13:59 crc kubenswrapper[4757]: E1006 14:13:59.943450 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1\": container with ID starting with cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1 not found: ID does not exist" containerID="cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.943491 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1"} err="failed to get container status \"cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1\": rpc error: code = NotFound desc = could not find container \"cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1\": container with ID starting with cca48f0c308a20da6a91286b1408b50a12040840b893680c37131a11c28a09b1 not found: ID does not exist" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.943526 4757 scope.go:117] "RemoveContainer" containerID="0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5" Oct 06 14:13:59 crc kubenswrapper[4757]: E1006 14:13:59.943904 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5\": container with ID starting with 0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5 not found: ID does not exist" containerID="0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5" Oct 06 14:13:59 crc kubenswrapper[4757]: I1006 14:13:59.943933 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5"} err="failed to get container status \"0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5\": rpc error: code = NotFound desc = could not find container \"0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5\": container with ID starting with 0238c7aa6202cad00b10bf4099cbe3a1093dc252307118e93267c1b144f988a5 not found: ID does not exist" Oct 06 14:14:00 crc kubenswrapper[4757]: I1006 14:14:00.214826 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e67fe126-23b7-4891-bd7f-e14842cf1276" path="/var/lib/kubelet/pods/e67fe126-23b7-4891-bd7f-e14842cf1276/volumes"
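
The three "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" pairs above are a benign race: the containers were already removed earlier in the cleanup, so when the kubelet retries RemoveContainer, CRI-O answers with gRPC code NotFound, and the end state (container gone) is what was wanted anyway. A minimal sketch of that error triage, assuming a hypothetical remove callback standing in for the CRI client (the grpc status/codes packages are the real ones used for CRI errors):

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeIfPresent wraps a hypothetical CRI "remove container" call.
// The interesting part is the triage: a gRPC NotFound from the runtime
// means the container is already gone, so the kubelet-style response is
// to treat the removal as done rather than fail the cleanup.
func removeIfPresent(remove func(id string) error, id string) error {
	if err := remove(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %s already gone, nothing to do\n", id)
			return nil
		}
		return fmt.Errorf("removing container %s: %w", id, err)
	}
	return nil
}

func main() {
	// Simulate the runtime answering the way CRI-O does in the log above.
	fake := func(id string) error {
		return status.Error(codes.NotFound, "could not find container \""+id+"\"")
	}
	if err := removeIfPresent(fake, "c2b2a058"); err != nil {
		fmt.Println("unexpected:", err)
	}
}
```
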
Oct 06 14:14:34 crc kubenswrapper[4757]: I1006 14:14:34.361288 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:14:34 crc kubenswrapper[4757]: I1006 14:14:34.361957 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.151388 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw"] Oct 06 14:15:00 crc kubenswrapper[4757]: E1006 14:15:00.152956 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerName="registry-server" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.152980 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerName="registry-server" Oct 06 14:15:00 crc kubenswrapper[4757]: E1006 14:15:00.152998 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerName="extract-content" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.153006 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerName="extract-content" Oct 06 14:15:00 crc kubenswrapper[4757]: E1006 14:15:00.153019 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerName="extract-utilities" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.153030 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerName="extract-utilities" Oct 06 14:15:00 crc kubenswrapper[4757]: E1006 14:15:00.153043 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerName="extract-utilities" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.153051 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerName="extract-utilities" Oct 06 14:15:00 crc kubenswrapper[4757]: E1006 14:15:00.153074 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerName="registry-server" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.153085 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerName="registry-server" Oct 06 14:15:00 crc kubenswrapper[4757]: E1006 14:15:00.153129 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerName="extract-content" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.153140 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerName="extract-content" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.153355 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e67fe126-23b7-4891-bd7f-e14842cf1276" containerName="registry-server" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.153390 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="8601701e-46e7-4e9e-b816-41ca2cb6feec" containerName="registry-server" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.154055 4757 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.163457 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.163603 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.176756 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw"] Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.330311 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxw59\" (UniqueName: \"kubernetes.io/projected/08b0a55f-e859-4942-b4c3-438f06cfabc8-kube-api-access-rxw59\") pod \"collect-profiles-29329335-bckbw\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.330423 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/08b0a55f-e859-4942-b4c3-438f06cfabc8-secret-volume\") pod \"collect-profiles-29329335-bckbw\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.330449 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/08b0a55f-e859-4942-b4c3-438f06cfabc8-config-volume\") pod \"collect-profiles-29329335-bckbw\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.431818 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxw59\" (UniqueName: \"kubernetes.io/projected/08b0a55f-e859-4942-b4c3-438f06cfabc8-kube-api-access-rxw59\") pod \"collect-profiles-29329335-bckbw\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.431928 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/08b0a55f-e859-4942-b4c3-438f06cfabc8-secret-volume\") pod \"collect-profiles-29329335-bckbw\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.431963 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/08b0a55f-e859-4942-b4c3-438f06cfabc8-config-volume\") pod \"collect-profiles-29329335-bckbw\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.432953 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/08b0a55f-e859-4942-b4c3-438f06cfabc8-config-volume\") pod 
\"collect-profiles-29329335-bckbw\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.446292 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/08b0a55f-e859-4942-b4c3-438f06cfabc8-secret-volume\") pod \"collect-profiles-29329335-bckbw\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.455230 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxw59\" (UniqueName: \"kubernetes.io/projected/08b0a55f-e859-4942-b4c3-438f06cfabc8-kube-api-access-rxw59\") pod \"collect-profiles-29329335-bckbw\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.482060 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:00 crc kubenswrapper[4757]: I1006 14:15:00.899031 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw"] Oct 06 14:15:00 crc kubenswrapper[4757]: W1006 14:15:00.906020 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08b0a55f_e859_4942_b4c3_438f06cfabc8.slice/crio-f3d63b866520faaa274144a99b0adc4a806c34d6f5e93a3ea9862feb4623f02b WatchSource:0}: Error finding container f3d63b866520faaa274144a99b0adc4a806c34d6f5e93a3ea9862feb4623f02b: Status 404 returned error can't find the container with id f3d63b866520faaa274144a99b0adc4a806c34d6f5e93a3ea9862feb4623f02b Oct 06 14:15:01 crc kubenswrapper[4757]: I1006 14:15:01.356175 4757 generic.go:334] "Generic (PLEG): container finished" podID="08b0a55f-e859-4942-b4c3-438f06cfabc8" containerID="863f7f23ac6fdd7c290d83cb3070f8f0e6f10ccdb3df54377ffade28ecd84f7e" exitCode=0 Oct 06 14:15:01 crc kubenswrapper[4757]: I1006 14:15:01.356414 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" event={"ID":"08b0a55f-e859-4942-b4c3-438f06cfabc8","Type":"ContainerDied","Data":"863f7f23ac6fdd7c290d83cb3070f8f0e6f10ccdb3df54377ffade28ecd84f7e"} Oct 06 14:15:01 crc kubenswrapper[4757]: I1006 14:15:01.356452 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" event={"ID":"08b0a55f-e859-4942-b4c3-438f06cfabc8","Type":"ContainerStarted","Data":"f3d63b866520faaa274144a99b0adc4a806c34d6f5e93a3ea9862feb4623f02b"} Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.646236 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.765352 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxw59\" (UniqueName: \"kubernetes.io/projected/08b0a55f-e859-4942-b4c3-438f06cfabc8-kube-api-access-rxw59\") pod \"08b0a55f-e859-4942-b4c3-438f06cfabc8\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.765430 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/08b0a55f-e859-4942-b4c3-438f06cfabc8-config-volume\") pod \"08b0a55f-e859-4942-b4c3-438f06cfabc8\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.765541 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/08b0a55f-e859-4942-b4c3-438f06cfabc8-secret-volume\") pod \"08b0a55f-e859-4942-b4c3-438f06cfabc8\" (UID: \"08b0a55f-e859-4942-b4c3-438f06cfabc8\") " Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.766577 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08b0a55f-e859-4942-b4c3-438f06cfabc8-config-volume" (OuterVolumeSpecName: "config-volume") pod "08b0a55f-e859-4942-b4c3-438f06cfabc8" (UID: "08b0a55f-e859-4942-b4c3-438f06cfabc8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.771386 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08b0a55f-e859-4942-b4c3-438f06cfabc8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "08b0a55f-e859-4942-b4c3-438f06cfabc8" (UID: "08b0a55f-e859-4942-b4c3-438f06cfabc8"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.771402 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08b0a55f-e859-4942-b4c3-438f06cfabc8-kube-api-access-rxw59" (OuterVolumeSpecName: "kube-api-access-rxw59") pod "08b0a55f-e859-4942-b4c3-438f06cfabc8" (UID: "08b0a55f-e859-4942-b4c3-438f06cfabc8"). InnerVolumeSpecName "kube-api-access-rxw59". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.868295 4757 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/08b0a55f-e859-4942-b4c3-438f06cfabc8-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.868350 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxw59\" (UniqueName: \"kubernetes.io/projected/08b0a55f-e859-4942-b4c3-438f06cfabc8-kube-api-access-rxw59\") on node \"crc\" DevicePath \"\"" Oct 06 14:15:02 crc kubenswrapper[4757]: I1006 14:15:02.868370 4757 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/08b0a55f-e859-4942-b4c3-438f06cfabc8-config-volume\") on node \"crc\" DevicePath \"\"" Oct 06 14:15:03 crc kubenswrapper[4757]: I1006 14:15:03.374480 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" event={"ID":"08b0a55f-e859-4942-b4c3-438f06cfabc8","Type":"ContainerDied","Data":"f3d63b866520faaa274144a99b0adc4a806c34d6f5e93a3ea9862feb4623f02b"} Oct 06 14:15:03 crc kubenswrapper[4757]: I1006 14:15:03.374957 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3d63b866520faaa274144a99b0adc4a806c34d6f5e93a3ea9862feb4623f02b" Oct 06 14:15:03 crc kubenswrapper[4757]: I1006 14:15:03.374593 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw" Oct 06 14:15:03 crc kubenswrapper[4757]: I1006 14:15:03.751016 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"] Oct 06 14:15:03 crc kubenswrapper[4757]: I1006 14:15:03.793811 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329290-rk7ds"] Oct 06 14:15:04 crc kubenswrapper[4757]: I1006 14:15:04.190006 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b93821f-dfad-44a5-a217-eb63987c1f0a" path="/var/lib/kubelet/pods/7b93821f-dfad-44a5-a217-eb63987c1f0a/volumes" Oct 06 14:15:04 crc kubenswrapper[4757]: I1006 14:15:04.361503 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:15:04 crc kubenswrapper[4757]: I1006 14:15:04.361578 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:15:34 crc kubenswrapper[4757]: I1006 14:15:34.361182 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:15:34 crc kubenswrapper[4757]: I1006 14:15:34.361890 4757 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:15:34 crc kubenswrapper[4757]: I1006 14:15:34.361976 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:15:34 crc kubenswrapper[4757]: I1006 14:15:34.363022 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:15:34 crc kubenswrapper[4757]: I1006 14:15:34.363213 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" gracePeriod=600 Oct 06 14:15:34 crc kubenswrapper[4757]: E1006 14:15:34.489344 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:15:34 crc kubenswrapper[4757]: I1006 14:15:34.624853 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" exitCode=0 Oct 06 14:15:34 crc kubenswrapper[4757]: I1006 14:15:34.624898 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97"} Oct 06 14:15:34 crc kubenswrapper[4757]: I1006 14:15:34.624934 4757 scope.go:117] "RemoveContainer" containerID="7350ebc344678632d779b5daf23386ef70b7c699653aaea28f8e806f04c9879f" Oct 06 14:15:34 crc kubenswrapper[4757]: I1006 14:15:34.625442 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:15:34 crc kubenswrapper[4757]: E1006 14:15:34.625774 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:15:36 crc kubenswrapper[4757]: I1006 14:15:36.170472 4757 scope.go:117] "RemoveContainer" containerID="c9cdccf51ed3ffabdec5ba8c55257e4cd788195638213e89980e5436c5f7d857" Oct 06 14:15:45 crc kubenswrapper[4757]: I1006 14:15:45.180916 4757 scope.go:117] "RemoveContainer" 
containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:15:45 crc kubenswrapper[4757]: E1006 14:15:45.182175 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:16:00 crc kubenswrapper[4757]: I1006 14:16:00.180523 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:16:00 crc kubenswrapper[4757]: E1006 14:16:00.181221 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:16:14 crc kubenswrapper[4757]: I1006 14:16:14.180509 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:16:14 crc kubenswrapper[4757]: E1006 14:16:14.181407 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:16:25 crc kubenswrapper[4757]: I1006 14:16:25.180934 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:16:25 crc kubenswrapper[4757]: E1006 14:16:25.181979 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:16:38 crc kubenswrapper[4757]: I1006 14:16:38.180973 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:16:38 crc kubenswrapper[4757]: E1006 14:16:38.182351 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:16:51 crc kubenswrapper[4757]: I1006 14:16:51.180694 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:16:51 crc kubenswrapper[4757]: E1006 14:16:51.182782 4757 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:17:05 crc kubenswrapper[4757]: I1006 14:17:05.180205 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:17:05 crc kubenswrapper[4757]: E1006 14:17:05.181240 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:17:17 crc kubenswrapper[4757]: I1006 14:17:17.179996 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:17:17 crc kubenswrapper[4757]: E1006 14:17:17.180971 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:17:29 crc kubenswrapper[4757]: I1006 14:17:29.179731 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:17:29 crc kubenswrapper[4757]: E1006 14:17:29.180424 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:17:44 crc kubenswrapper[4757]: I1006 14:17:44.180001 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:17:44 crc kubenswrapper[4757]: E1006 14:17:44.181235 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:17:58 crc kubenswrapper[4757]: I1006 14:17:58.180317 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:17:58 crc kubenswrapper[4757]: E1006 14:17:58.181715 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:18:11 crc kubenswrapper[4757]: I1006 14:18:11.180637 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:18:11 crc kubenswrapper[4757]: E1006 14:18:11.181537 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:18:24 crc kubenswrapper[4757]: I1006 14:18:24.182253 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:18:24 crc kubenswrapper[4757]: E1006 14:18:24.184607 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:18:35 crc kubenswrapper[4757]: I1006 14:18:35.180507 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:18:35 crc kubenswrapper[4757]: E1006 14:18:35.181746 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:18:48 crc kubenswrapper[4757]: I1006 14:18:48.181055 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:18:48 crc kubenswrapper[4757]: E1006 14:18:48.182426 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:19:03 crc kubenswrapper[4757]: I1006 14:19:03.179887 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:19:03 crc kubenswrapper[4757]: E1006 14:19:03.180571 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" 
podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:19:17 crc kubenswrapper[4757]: I1006 14:19:17.180549 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:19:17 crc kubenswrapper[4757]: E1006 14:19:17.181479 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:19:28 crc kubenswrapper[4757]: I1006 14:19:28.180513 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:19:28 crc kubenswrapper[4757]: E1006 14:19:28.181538 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:19:43 crc kubenswrapper[4757]: I1006 14:19:43.180791 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:19:43 crc kubenswrapper[4757]: E1006 14:19:43.181858 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:19:56 crc kubenswrapper[4757]: I1006 14:19:56.180418 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:19:56 crc kubenswrapper[4757]: E1006 14:19:56.182329 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:20:08 crc kubenswrapper[4757]: I1006 14:20:08.179922 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:20:08 crc kubenswrapper[4757]: E1006 14:20:08.180692 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:20:19 crc kubenswrapper[4757]: I1006 14:20:19.180837 4757 scope.go:117] "RemoveContainer" 
containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:20:19 crc kubenswrapper[4757]: E1006 14:20:19.181837 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:20:34 crc kubenswrapper[4757]: I1006 14:20:34.180509 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:20:34 crc kubenswrapper[4757]: E1006 14:20:34.181923 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.055757 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kb4lq"] Oct 06 14:20:38 crc kubenswrapper[4757]: E1006 14:20:38.056626 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08b0a55f-e859-4942-b4c3-438f06cfabc8" containerName="collect-profiles" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.056649 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="08b0a55f-e859-4942-b4c3-438f06cfabc8" containerName="collect-profiles" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.056923 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="08b0a55f-e859-4942-b4c3-438f06cfabc8" containerName="collect-profiles" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.058333 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.062956 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kb4lq"] Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.225773 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4xqf\" (UniqueName: \"kubernetes.io/projected/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-kube-api-access-l4xqf\") pod \"redhat-marketplace-kb4lq\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.225853 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-utilities\") pod \"redhat-marketplace-kb4lq\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.225928 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-catalog-content\") pod \"redhat-marketplace-kb4lq\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.327396 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-utilities\") pod \"redhat-marketplace-kb4lq\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.327514 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-catalog-content\") pod \"redhat-marketplace-kb4lq\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.327545 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4xqf\" (UniqueName: \"kubernetes.io/projected/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-kube-api-access-l4xqf\") pod \"redhat-marketplace-kb4lq\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.327959 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-utilities\") pod \"redhat-marketplace-kb4lq\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.328216 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-catalog-content\") pod \"redhat-marketplace-kb4lq\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.347644 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-l4xqf\" (UniqueName: \"kubernetes.io/projected/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-kube-api-access-l4xqf\") pod \"redhat-marketplace-kb4lq\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.383548 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:38 crc kubenswrapper[4757]: I1006 14:20:38.817671 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kb4lq"] Oct 06 14:20:39 crc kubenswrapper[4757]: I1006 14:20:39.156300 4757 generic.go:334] "Generic (PLEG): container finished" podID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerID="6bf2ad5be723cd7435194643507daf38148e03212faa72257e8cb88adce226d8" exitCode=0 Oct 06 14:20:39 crc kubenswrapper[4757]: I1006 14:20:39.156357 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kb4lq" event={"ID":"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79","Type":"ContainerDied","Data":"6bf2ad5be723cd7435194643507daf38148e03212faa72257e8cb88adce226d8"} Oct 06 14:20:39 crc kubenswrapper[4757]: I1006 14:20:39.156389 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kb4lq" event={"ID":"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79","Type":"ContainerStarted","Data":"3ff0a4acea5ccfa560404ecc5e9ff58357e0f7754a63b859d615c17ca4003839"} Oct 06 14:20:39 crc kubenswrapper[4757]: I1006 14:20:39.159748 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 14:20:40 crc kubenswrapper[4757]: I1006 14:20:40.169195 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kb4lq" event={"ID":"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79","Type":"ContainerStarted","Data":"78923be99eb6958aed2ed4e73e9a3f971259b9884f59f0c563843ba0c85772d7"} Oct 06 14:20:41 crc kubenswrapper[4757]: I1006 14:20:41.180616 4757 generic.go:334] "Generic (PLEG): container finished" podID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerID="78923be99eb6958aed2ed4e73e9a3f971259b9884f59f0c563843ba0c85772d7" exitCode=0 Oct 06 14:20:41 crc kubenswrapper[4757]: I1006 14:20:41.180676 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kb4lq" event={"ID":"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79","Type":"ContainerDied","Data":"78923be99eb6958aed2ed4e73e9a3f971259b9884f59f0c563843ba0c85772d7"} Oct 06 14:20:42 crc kubenswrapper[4757]: I1006 14:20:42.191959 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kb4lq" event={"ID":"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79","Type":"ContainerStarted","Data":"ea74b6f4d7a06333223b482f46336172915959bd80e9a34ce17a2930d13c8f7c"} Oct 06 14:20:42 crc kubenswrapper[4757]: I1006 14:20:42.219686 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kb4lq" podStartSLOduration=1.5540719840000001 podStartE2EDuration="4.219665749s" podCreationTimestamp="2025-10-06 14:20:38 +0000 UTC" firstStartedPulling="2025-10-06 14:20:39.159400282 +0000 UTC m=+2527.656718829" lastFinishedPulling="2025-10-06 14:20:41.824994037 +0000 UTC m=+2530.322312594" observedRunningTime="2025-10-06 14:20:42.218778092 +0000 UTC m=+2530.716096639" watchObservedRunningTime="2025-10-06 14:20:42.219665749 +0000 UTC m=+2530.716984286"
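
The startup-latency entry above can be checked by hand: podStartSLOduration is the end-to-end startup time with the image-pull window subtracted. Using the wall-clock timestamps from the entry gives ~1.554s, matching the logged 1.554071984 up to the rounding difference between wall-clock and monotonic (m=+...) offsets. A small sketch of the arithmetic, with the timestamps copied from the log line:

```go
package main

import (
	"fmt"
	"time"
)

// Reconstructs the pod_startup_latency_tracker numbers for
// redhat-marketplace-kb4lq: SLO duration = end-to-end startup time
// minus time spent pulling images.
func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-06 14:20:38 +0000 UTC")
	firstPull := parse("2025-10-06 14:20:39.159400282 +0000 UTC")
	lastPull := parse("2025-10-06 14:20:41.824994037 +0000 UTC")
	running := parse("2025-10-06 14:20:42.219665749 +0000 UTC")

	e2e := running.Sub(created)
	pull := lastPull.Sub(firstPull)
	fmt.Println("podStartE2EDuration:", e2e)      // 4.219665749s, as logged
	fmt.Println("image pull time:", pull)         // 2.665593755s
	fmt.Println("podStartSLOduration:", e2e-pull) // ~1.554s, as logged
}
```
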
m=+2530.716984286" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.635407 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ph72x"] Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.637361 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.644640 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ph72x"] Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.836263 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da902388-6f72-4fc4-9a8e-da911b563ecb-utilities\") pod \"redhat-operators-ph72x\" (UID: \"da902388-6f72-4fc4-9a8e-da911b563ecb\") " pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.836844 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da902388-6f72-4fc4-9a8e-da911b563ecb-catalog-content\") pod \"redhat-operators-ph72x\" (UID: \"da902388-6f72-4fc4-9a8e-da911b563ecb\") " pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.836893 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7jjk\" (UniqueName: \"kubernetes.io/projected/da902388-6f72-4fc4-9a8e-da911b563ecb-kube-api-access-d7jjk\") pod \"redhat-operators-ph72x\" (UID: \"da902388-6f72-4fc4-9a8e-da911b563ecb\") " pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.937977 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da902388-6f72-4fc4-9a8e-da911b563ecb-utilities\") pod \"redhat-operators-ph72x\" (UID: \"da902388-6f72-4fc4-9a8e-da911b563ecb\") " pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.938054 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da902388-6f72-4fc4-9a8e-da911b563ecb-catalog-content\") pod \"redhat-operators-ph72x\" (UID: \"da902388-6f72-4fc4-9a8e-da911b563ecb\") " pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.938076 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7jjk\" (UniqueName: \"kubernetes.io/projected/da902388-6f72-4fc4-9a8e-da911b563ecb-kube-api-access-d7jjk\") pod \"redhat-operators-ph72x\" (UID: \"da902388-6f72-4fc4-9a8e-da911b563ecb\") " pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.938478 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da902388-6f72-4fc4-9a8e-da911b563ecb-utilities\") pod \"redhat-operators-ph72x\" (UID: \"da902388-6f72-4fc4-9a8e-da911b563ecb\") " pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.938652 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/da902388-6f72-4fc4-9a8e-da911b563ecb-catalog-content\") pod \"redhat-operators-ph72x\" (UID: \"da902388-6f72-4fc4-9a8e-da911b563ecb\") " pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:45 crc kubenswrapper[4757]: I1006 14:20:45.959160 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7jjk\" (UniqueName: \"kubernetes.io/projected/da902388-6f72-4fc4-9a8e-da911b563ecb-kube-api-access-d7jjk\") pod \"redhat-operators-ph72x\" (UID: \"da902388-6f72-4fc4-9a8e-da911b563ecb\") " pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:46 crc kubenswrapper[4757]: I1006 14:20:46.000170 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:46 crc kubenswrapper[4757]: I1006 14:20:46.179619 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:20:46 crc kubenswrapper[4757]: I1006 14:20:46.415571 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ph72x"] Oct 06 14:20:46 crc kubenswrapper[4757]: W1006 14:20:46.424409 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda902388_6f72_4fc4_9a8e_da911b563ecb.slice/crio-3913afe58179e4e9bbee96ae5ce4477a005e7ed362f34ee0a665d2f64ef3c749 WatchSource:0}: Error finding container 3913afe58179e4e9bbee96ae5ce4477a005e7ed362f34ee0a665d2f64ef3c749: Status 404 returned error can't find the container with id 3913afe58179e4e9bbee96ae5ce4477a005e7ed362f34ee0a665d2f64ef3c749 Oct 06 14:20:47 crc kubenswrapper[4757]: I1006 14:20:47.232718 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"0a0da14d5df97879a51ed20c7b64fac97bf680cd8013e0a132c0db3c8e231bd7"} Oct 06 14:20:47 crc kubenswrapper[4757]: I1006 14:20:47.235952 4757 generic.go:334] "Generic (PLEG): container finished" podID="da902388-6f72-4fc4-9a8e-da911b563ecb" containerID="06d9df2fc7551c46ca1cd470dd56fc68f1b58360e23321c0f474fb68ef90278e" exitCode=0 Oct 06 14:20:47 crc kubenswrapper[4757]: I1006 14:20:47.236218 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph72x" event={"ID":"da902388-6f72-4fc4-9a8e-da911b563ecb","Type":"ContainerDied","Data":"06d9df2fc7551c46ca1cd470dd56fc68f1b58360e23321c0f474fb68ef90278e"} Oct 06 14:20:47 crc kubenswrapper[4757]: I1006 14:20:47.236294 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph72x" event={"ID":"da902388-6f72-4fc4-9a8e-da911b563ecb","Type":"ContainerStarted","Data":"3913afe58179e4e9bbee96ae5ce4477a005e7ed362f34ee0a665d2f64ef3c749"} Oct 06 14:20:48 crc kubenswrapper[4757]: I1006 14:20:48.384103 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:48 crc kubenswrapper[4757]: I1006 14:20:48.384395 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:48 crc kubenswrapper[4757]: I1006 14:20:48.426316 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:49 crc kubenswrapper[4757]: I1006 14:20:49.292432 4757 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:51 crc kubenswrapper[4757]: I1006 14:20:51.025373 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kb4lq"] Oct 06 14:20:51 crc kubenswrapper[4757]: I1006 14:20:51.267572 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kb4lq" podUID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerName="registry-server" containerID="cri-o://ea74b6f4d7a06333223b482f46336172915959bd80e9a34ce17a2930d13c8f7c" gracePeriod=2 Oct 06 14:20:52 crc kubenswrapper[4757]: I1006 14:20:52.278788 4757 generic.go:334] "Generic (PLEG): container finished" podID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerID="ea74b6f4d7a06333223b482f46336172915959bd80e9a34ce17a2930d13c8f7c" exitCode=0 Oct 06 14:20:52 crc kubenswrapper[4757]: I1006 14:20:52.278938 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kb4lq" event={"ID":"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79","Type":"ContainerDied","Data":"ea74b6f4d7a06333223b482f46336172915959bd80e9a34ce17a2930d13c8f7c"} Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.785723 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.880935 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4xqf\" (UniqueName: \"kubernetes.io/projected/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-kube-api-access-l4xqf\") pod \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.881261 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-utilities\") pod \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.881376 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-catalog-content\") pod \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\" (UID: \"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79\") " Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.882602 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-utilities" (OuterVolumeSpecName: "utilities") pod "0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" (UID: "0f4f9c7a-f45d-4e8b-89aa-75ea68315c79"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.890690 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-kube-api-access-l4xqf" (OuterVolumeSpecName: "kube-api-access-l4xqf") pod "0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" (UID: "0f4f9c7a-f45d-4e8b-89aa-75ea68315c79"). InnerVolumeSpecName "kube-api-access-l4xqf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.896221 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" (UID: "0f4f9c7a-f45d-4e8b-89aa-75ea68315c79"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.983114 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.983145 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4xqf\" (UniqueName: \"kubernetes.io/projected/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-kube-api-access-l4xqf\") on node \"crc\" DevicePath \"\"" Oct 06 14:20:53 crc kubenswrapper[4757]: I1006 14:20:53.983154 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:20:54 crc kubenswrapper[4757]: I1006 14:20:54.294868 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kb4lq" Oct 06 14:20:54 crc kubenswrapper[4757]: I1006 14:20:54.294862 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kb4lq" event={"ID":"0f4f9c7a-f45d-4e8b-89aa-75ea68315c79","Type":"ContainerDied","Data":"3ff0a4acea5ccfa560404ecc5e9ff58357e0f7754a63b859d615c17ca4003839"} Oct 06 14:20:54 crc kubenswrapper[4757]: I1006 14:20:54.295050 4757 scope.go:117] "RemoveContainer" containerID="ea74b6f4d7a06333223b482f46336172915959bd80e9a34ce17a2930d13c8f7c" Oct 06 14:20:54 crc kubenswrapper[4757]: I1006 14:20:54.298906 4757 generic.go:334] "Generic (PLEG): container finished" podID="da902388-6f72-4fc4-9a8e-da911b563ecb" containerID="35d2f8b1ab524513b2fed965846d0eb7bc7c06151427eee568232cfe6778ae62" exitCode=0 Oct 06 14:20:54 crc kubenswrapper[4757]: I1006 14:20:54.298944 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph72x" event={"ID":"da902388-6f72-4fc4-9a8e-da911b563ecb","Type":"ContainerDied","Data":"35d2f8b1ab524513b2fed965846d0eb7bc7c06151427eee568232cfe6778ae62"} Oct 06 14:20:54 crc kubenswrapper[4757]: I1006 14:20:54.317306 4757 scope.go:117] "RemoveContainer" containerID="78923be99eb6958aed2ed4e73e9a3f971259b9884f59f0c563843ba0c85772d7" Oct 06 14:20:54 crc kubenswrapper[4757]: I1006 14:20:54.352691 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kb4lq"] Oct 06 14:20:54 crc kubenswrapper[4757]: I1006 14:20:54.356067 4757 scope.go:117] "RemoveContainer" containerID="6bf2ad5be723cd7435194643507daf38148e03212faa72257e8cb88adce226d8" Oct 06 14:20:54 crc kubenswrapper[4757]: I1006 14:20:54.358996 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kb4lq"] Oct 06 14:20:55 crc kubenswrapper[4757]: I1006 14:20:55.311273 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ph72x" 
event={"ID":"da902388-6f72-4fc4-9a8e-da911b563ecb","Type":"ContainerStarted","Data":"9a45f5c90bc3912bb7988f4300105e9d14921dddacf1c918a9117d19e73ceac0"} Oct 06 14:20:55 crc kubenswrapper[4757]: I1006 14:20:55.327161 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ph72x" podStartSLOduration=2.723266725 podStartE2EDuration="10.327134663s" podCreationTimestamp="2025-10-06 14:20:45 +0000 UTC" firstStartedPulling="2025-10-06 14:20:47.238913144 +0000 UTC m=+2535.736231681" lastFinishedPulling="2025-10-06 14:20:54.842781082 +0000 UTC m=+2543.340099619" observedRunningTime="2025-10-06 14:20:55.325800111 +0000 UTC m=+2543.823118698" watchObservedRunningTime="2025-10-06 14:20:55.327134663 +0000 UTC m=+2543.824453240" Oct 06 14:20:56 crc kubenswrapper[4757]: I1006 14:20:56.000916 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:56 crc kubenswrapper[4757]: I1006 14:20:56.001270 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:20:56 crc kubenswrapper[4757]: I1006 14:20:56.188554 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" path="/var/lib/kubelet/pods/0f4f9c7a-f45d-4e8b-89aa-75ea68315c79/volumes" Oct 06 14:20:57 crc kubenswrapper[4757]: I1006 14:20:57.057182 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ph72x" podUID="da902388-6f72-4fc4-9a8e-da911b563ecb" containerName="registry-server" probeResult="failure" output=< Oct 06 14:20:57 crc kubenswrapper[4757]: timeout: failed to connect service ":50051" within 1s Oct 06 14:20:57 crc kubenswrapper[4757]: > Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.051342 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.101661 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ph72x" Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.162159 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ph72x"] Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.290126 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cwdkh"] Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.290684 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cwdkh" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" containerName="registry-server" containerID="cri-o://3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd" gracePeriod=2 Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.715281 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cwdkh" Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.865915 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgv5q\" (UniqueName: \"kubernetes.io/projected/56126440-030a-4d73-a36b-41ed7d44c23c-kube-api-access-vgv5q\") pod \"56126440-030a-4d73-a36b-41ed7d44c23c\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.865985 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-catalog-content\") pod \"56126440-030a-4d73-a36b-41ed7d44c23c\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.866124 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-utilities\") pod \"56126440-030a-4d73-a36b-41ed7d44c23c\" (UID: \"56126440-030a-4d73-a36b-41ed7d44c23c\") " Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.866922 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-utilities" (OuterVolumeSpecName: "utilities") pod "56126440-030a-4d73-a36b-41ed7d44c23c" (UID: "56126440-030a-4d73-a36b-41ed7d44c23c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.878436 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56126440-030a-4d73-a36b-41ed7d44c23c-kube-api-access-vgv5q" (OuterVolumeSpecName: "kube-api-access-vgv5q") pod "56126440-030a-4d73-a36b-41ed7d44c23c" (UID: "56126440-030a-4d73-a36b-41ed7d44c23c"). InnerVolumeSpecName "kube-api-access-vgv5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.952790 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56126440-030a-4d73-a36b-41ed7d44c23c" (UID: "56126440-030a-4d73-a36b-41ed7d44c23c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.968069 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgv5q\" (UniqueName: \"kubernetes.io/projected/56126440-030a-4d73-a36b-41ed7d44c23c-kube-api-access-vgv5q\") on node \"crc\" DevicePath \"\"" Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.968128 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:21:06 crc kubenswrapper[4757]: I1006 14:21:06.968140 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56126440-030a-4d73-a36b-41ed7d44c23c-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.407225 4757 generic.go:334] "Generic (PLEG): container finished" podID="56126440-030a-4d73-a36b-41ed7d44c23c" containerID="3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd" exitCode=0 Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.407301 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cwdkh" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.407323 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cwdkh" event={"ID":"56126440-030a-4d73-a36b-41ed7d44c23c","Type":"ContainerDied","Data":"3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd"} Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.408541 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cwdkh" event={"ID":"56126440-030a-4d73-a36b-41ed7d44c23c","Type":"ContainerDied","Data":"697cb2aea2c0cd79672bcc4c9dd5b1a4143e5b30461bfe05a7ee753ed608caae"} Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.408568 4757 scope.go:117] "RemoveContainer" containerID="3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.426784 4757 scope.go:117] "RemoveContainer" containerID="687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.448804 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cwdkh"] Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.457025 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cwdkh"] Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.465561 4757 scope.go:117] "RemoveContainer" containerID="7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.500811 4757 scope.go:117] "RemoveContainer" containerID="3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd" Oct 06 14:21:07 crc kubenswrapper[4757]: E1006 14:21:07.501296 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd\": container with ID starting with 3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd not found: ID does not exist" containerID="3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.501338 4757 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd"} err="failed to get container status \"3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd\": rpc error: code = NotFound desc = could not find container \"3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd\": container with ID starting with 3fc416aa9574b7702744760e9f2df9a32740fe16c4d223526bea8d069ba5f7fd not found: ID does not exist" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.501365 4757 scope.go:117] "RemoveContainer" containerID="687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207" Oct 06 14:21:07 crc kubenswrapper[4757]: E1006 14:21:07.501660 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207\": container with ID starting with 687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207 not found: ID does not exist" containerID="687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.501780 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207"} err="failed to get container status \"687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207\": rpc error: code = NotFound desc = could not find container \"687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207\": container with ID starting with 687f5e55231eea5f3eb76845c1c82b36f4afad618df922002b0785fbecf77207 not found: ID does not exist" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.502181 4757 scope.go:117] "RemoveContainer" containerID="7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28" Oct 06 14:21:07 crc kubenswrapper[4757]: E1006 14:21:07.502577 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28\": container with ID starting with 7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28 not found: ID does not exist" containerID="7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28" Oct 06 14:21:07 crc kubenswrapper[4757]: I1006 14:21:07.502665 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28"} err="failed to get container status \"7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28\": rpc error: code = NotFound desc = could not find container \"7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28\": container with ID starting with 7591358e928d0943b4e09cd2c7a08834bb39f2916ee86d1fd4cb5c623c5d5e28 not found: ID does not exist" Oct 06 14:21:08 crc kubenswrapper[4757]: I1006 14:21:08.188168 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" path="/var/lib/kubelet/pods/56126440-030a-4d73-a36b-41ed7d44c23c/volumes" Oct 06 14:23:04 crc kubenswrapper[4757]: I1006 14:23:04.361446 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:23:04 crc kubenswrapper[4757]: I1006 14:23:04.362003 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:23:34 crc kubenswrapper[4757]: I1006 14:23:34.361574 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:23:34 crc kubenswrapper[4757]: I1006 14:23:34.362046 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.849307 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d2gpb"] Oct 06 14:23:50 crc kubenswrapper[4757]: E1006 14:23:50.850170 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" containerName="extract-content" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.850187 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" containerName="extract-content" Oct 06 14:23:50 crc kubenswrapper[4757]: E1006 14:23:50.850398 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerName="registry-server" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.850407 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerName="registry-server" Oct 06 14:23:50 crc kubenswrapper[4757]: E1006 14:23:50.850423 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" containerName="extract-utilities" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.850433 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" containerName="extract-utilities" Oct 06 14:23:50 crc kubenswrapper[4757]: E1006 14:23:50.850445 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerName="extract-content" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.850452 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerName="extract-content" Oct 06 14:23:50 crc kubenswrapper[4757]: E1006 14:23:50.850473 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" containerName="registry-server" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.850480 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" containerName="registry-server" Oct 06 14:23:50 crc kubenswrapper[4757]: E1006 14:23:50.850491 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" 
containerName="extract-utilities" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.850498 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerName="extract-utilities" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.850678 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f4f9c7a-f45d-4e8b-89aa-75ea68315c79" containerName="registry-server" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.850711 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="56126440-030a-4d73-a36b-41ed7d44c23c" containerName="registry-server" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.852695 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.870790 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d2gpb"] Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.982648 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfv7c\" (UniqueName: \"kubernetes.io/projected/3c992ff8-9e2c-4a19-bf89-89669bd42152-kube-api-access-kfv7c\") pod \"community-operators-d2gpb\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.982709 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-utilities\") pod \"community-operators-d2gpb\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:50 crc kubenswrapper[4757]: I1006 14:23:50.982739 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-catalog-content\") pod \"community-operators-d2gpb\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.084597 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfv7c\" (UniqueName: \"kubernetes.io/projected/3c992ff8-9e2c-4a19-bf89-89669bd42152-kube-api-access-kfv7c\") pod \"community-operators-d2gpb\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.084856 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-utilities\") pod \"community-operators-d2gpb\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.084943 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-catalog-content\") pod \"community-operators-d2gpb\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.085600 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-utilities\") pod \"community-operators-d2gpb\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.085627 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-catalog-content\") pod \"community-operators-d2gpb\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.104829 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfv7c\" (UniqueName: \"kubernetes.io/projected/3c992ff8-9e2c-4a19-bf89-89669bd42152-kube-api-access-kfv7c\") pod \"community-operators-d2gpb\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.181039 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.629597 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d2gpb"] Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.820125 4757 generic.go:334] "Generic (PLEG): container finished" podID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerID="6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54" exitCode=0 Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.820171 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2gpb" event={"ID":"3c992ff8-9e2c-4a19-bf89-89669bd42152","Type":"ContainerDied","Data":"6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54"} Oct 06 14:23:51 crc kubenswrapper[4757]: I1006 14:23:51.820197 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2gpb" event={"ID":"3c992ff8-9e2c-4a19-bf89-89669bd42152","Type":"ContainerStarted","Data":"47b7d2f57da4691dc8c0a7ca89bd612bbbf138f3c626fff51681ec004e256a5b"} Oct 06 14:23:52 crc kubenswrapper[4757]: I1006 14:23:52.829045 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2gpb" event={"ID":"3c992ff8-9e2c-4a19-bf89-89669bd42152","Type":"ContainerStarted","Data":"d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609"} Oct 06 14:23:53 crc kubenswrapper[4757]: I1006 14:23:53.840312 4757 generic.go:334] "Generic (PLEG): container finished" podID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerID="d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609" exitCode=0 Oct 06 14:23:53 crc kubenswrapper[4757]: I1006 14:23:53.840383 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2gpb" event={"ID":"3c992ff8-9e2c-4a19-bf89-89669bd42152","Type":"ContainerDied","Data":"d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609"} Oct 06 14:23:54 crc kubenswrapper[4757]: I1006 14:23:54.856768 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2gpb" event={"ID":"3c992ff8-9e2c-4a19-bf89-89669bd42152","Type":"ContainerStarted","Data":"026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9"} Oct 06 
14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.410807 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d2gpb" podStartSLOduration=5.896778738 podStartE2EDuration="8.410785319s" podCreationTimestamp="2025-10-06 14:23:50 +0000 UTC" firstStartedPulling="2025-10-06 14:23:51.821416187 +0000 UTC m=+2720.318734724" lastFinishedPulling="2025-10-06 14:23:54.335422758 +0000 UTC m=+2722.832741305" observedRunningTime="2025-10-06 14:23:54.874721183 +0000 UTC m=+2723.372039750" watchObservedRunningTime="2025-10-06 14:23:58.410785319 +0000 UTC m=+2726.908103866" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.411945 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5ctws"] Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.413667 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.426319 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5ctws"] Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.597364 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc03633f-fb13-433b-a4a5-6cf688d6d60e-catalog-content\") pod \"certified-operators-5ctws\" (UID: \"bc03633f-fb13-433b-a4a5-6cf688d6d60e\") " pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.597456 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvgkt\" (UniqueName: \"kubernetes.io/projected/bc03633f-fb13-433b-a4a5-6cf688d6d60e-kube-api-access-gvgkt\") pod \"certified-operators-5ctws\" (UID: \"bc03633f-fb13-433b-a4a5-6cf688d6d60e\") " pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.597678 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc03633f-fb13-433b-a4a5-6cf688d6d60e-utilities\") pod \"certified-operators-5ctws\" (UID: \"bc03633f-fb13-433b-a4a5-6cf688d6d60e\") " pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.699442 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc03633f-fb13-433b-a4a5-6cf688d6d60e-catalog-content\") pod \"certified-operators-5ctws\" (UID: \"bc03633f-fb13-433b-a4a5-6cf688d6d60e\") " pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.699574 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvgkt\" (UniqueName: \"kubernetes.io/projected/bc03633f-fb13-433b-a4a5-6cf688d6d60e-kube-api-access-gvgkt\") pod \"certified-operators-5ctws\" (UID: \"bc03633f-fb13-433b-a4a5-6cf688d6d60e\") " pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.699683 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc03633f-fb13-433b-a4a5-6cf688d6d60e-utilities\") pod \"certified-operators-5ctws\" (UID: \"bc03633f-fb13-433b-a4a5-6cf688d6d60e\") " 
pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.699979 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc03633f-fb13-433b-a4a5-6cf688d6d60e-catalog-content\") pod \"certified-operators-5ctws\" (UID: \"bc03633f-fb13-433b-a4a5-6cf688d6d60e\") " pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.700336 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc03633f-fb13-433b-a4a5-6cf688d6d60e-utilities\") pod \"certified-operators-5ctws\" (UID: \"bc03633f-fb13-433b-a4a5-6cf688d6d60e\") " pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.730037 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvgkt\" (UniqueName: \"kubernetes.io/projected/bc03633f-fb13-433b-a4a5-6cf688d6d60e-kube-api-access-gvgkt\") pod \"certified-operators-5ctws\" (UID: \"bc03633f-fb13-433b-a4a5-6cf688d6d60e\") " pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:58 crc kubenswrapper[4757]: I1006 14:23:58.738726 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:23:59 crc kubenswrapper[4757]: I1006 14:23:59.233308 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5ctws"] Oct 06 14:23:59 crc kubenswrapper[4757]: I1006 14:23:59.900768 4757 generic.go:334] "Generic (PLEG): container finished" podID="bc03633f-fb13-433b-a4a5-6cf688d6d60e" containerID="7885711728dabc8baae3b5bb976d57a7c1a2f5c39b76f1d9170f66a81aaa187c" exitCode=0 Oct 06 14:23:59 crc kubenswrapper[4757]: I1006 14:23:59.900865 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ctws" event={"ID":"bc03633f-fb13-433b-a4a5-6cf688d6d60e","Type":"ContainerDied","Data":"7885711728dabc8baae3b5bb976d57a7c1a2f5c39b76f1d9170f66a81aaa187c"} Oct 06 14:23:59 crc kubenswrapper[4757]: I1006 14:23:59.900988 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ctws" event={"ID":"bc03633f-fb13-433b-a4a5-6cf688d6d60e","Type":"ContainerStarted","Data":"ddd5c8989a61ae022dfeb89ddd608dff160ceabf26c73efde59705fb5d7213b9"} Oct 06 14:24:01 crc kubenswrapper[4757]: I1006 14:24:01.182174 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:24:01 crc kubenswrapper[4757]: I1006 14:24:01.182545 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:24:01 crc kubenswrapper[4757]: I1006 14:24:01.238947 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:24:01 crc kubenswrapper[4757]: I1006 14:24:01.964002 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:24:02 crc kubenswrapper[4757]: I1006 14:24:02.804852 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d2gpb"] Oct 06 14:24:03 crc kubenswrapper[4757]: E1006 14:24:03.941029 4757 cadvisor_stats_provider.go:516] "Partial failure issuing 
cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc03633f_fb13_433b_a4a5_6cf688d6d60e.slice/crio-4c05996cf289e93d678566768180e2e6399cd2622c59d31652ce1df95110d00e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc03633f_fb13_433b_a4a5_6cf688d6d60e.slice/crio-conmon-4c05996cf289e93d678566768180e2e6399cd2622c59d31652ce1df95110d00e.scope\": RecentStats: unable to find data in memory cache]" Oct 06 14:24:03 crc kubenswrapper[4757]: I1006 14:24:03.944004 4757 generic.go:334] "Generic (PLEG): container finished" podID="bc03633f-fb13-433b-a4a5-6cf688d6d60e" containerID="4c05996cf289e93d678566768180e2e6399cd2622c59d31652ce1df95110d00e" exitCode=0 Oct 06 14:24:03 crc kubenswrapper[4757]: I1006 14:24:03.944110 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ctws" event={"ID":"bc03633f-fb13-433b-a4a5-6cf688d6d60e","Type":"ContainerDied","Data":"4c05996cf289e93d678566768180e2e6399cd2622c59d31652ce1df95110d00e"} Oct 06 14:24:03 crc kubenswrapper[4757]: I1006 14:24:03.946579 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d2gpb" podUID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerName="registry-server" containerID="cri-o://026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9" gracePeriod=2 Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.361630 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.362065 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.362136 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.363032 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0a0da14d5df97879a51ed20c7b64fac97bf680cd8013e0a132c0db3c8e231bd7"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.363222 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://0a0da14d5df97879a51ed20c7b64fac97bf680cd8013e0a132c0db3c8e231bd7" gracePeriod=600 Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.459894 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.586516 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfv7c\" (UniqueName: \"kubernetes.io/projected/3c992ff8-9e2c-4a19-bf89-89669bd42152-kube-api-access-kfv7c\") pod \"3c992ff8-9e2c-4a19-bf89-89669bd42152\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.586581 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-catalog-content\") pod \"3c992ff8-9e2c-4a19-bf89-89669bd42152\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.586661 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-utilities\") pod \"3c992ff8-9e2c-4a19-bf89-89669bd42152\" (UID: \"3c992ff8-9e2c-4a19-bf89-89669bd42152\") " Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.587499 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-utilities" (OuterVolumeSpecName: "utilities") pod "3c992ff8-9e2c-4a19-bf89-89669bd42152" (UID: "3c992ff8-9e2c-4a19-bf89-89669bd42152"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.594488 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c992ff8-9e2c-4a19-bf89-89669bd42152-kube-api-access-kfv7c" (OuterVolumeSpecName: "kube-api-access-kfv7c") pod "3c992ff8-9e2c-4a19-bf89-89669bd42152" (UID: "3c992ff8-9e2c-4a19-bf89-89669bd42152"). InnerVolumeSpecName "kube-api-access-kfv7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.637590 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c992ff8-9e2c-4a19-bf89-89669bd42152" (UID: "3c992ff8-9e2c-4a19-bf89-89669bd42152"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.688284 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.688318 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfv7c\" (UniqueName: \"kubernetes.io/projected/3c992ff8-9e2c-4a19-bf89-89669bd42152-kube-api-access-kfv7c\") on node \"crc\" DevicePath \"\"" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.688329 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c992ff8-9e2c-4a19-bf89-89669bd42152-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.958313 4757 generic.go:334] "Generic (PLEG): container finished" podID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerID="026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9" exitCode=0 Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.958406 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d2gpb" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.958426 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2gpb" event={"ID":"3c992ff8-9e2c-4a19-bf89-89669bd42152","Type":"ContainerDied","Data":"026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9"} Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.959027 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d2gpb" event={"ID":"3c992ff8-9e2c-4a19-bf89-89669bd42152","Type":"ContainerDied","Data":"47b7d2f57da4691dc8c0a7ca89bd612bbbf138f3c626fff51681ec004e256a5b"} Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.959068 4757 scope.go:117] "RemoveContainer" containerID="026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9" Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.964307 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="0a0da14d5df97879a51ed20c7b64fac97bf680cd8013e0a132c0db3c8e231bd7" exitCode=0 Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.964406 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"0a0da14d5df97879a51ed20c7b64fac97bf680cd8013e0a132c0db3c8e231bd7"} Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.964492 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"} Oct 06 14:24:04 crc kubenswrapper[4757]: I1006 14:24:04.967838 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5ctws" event={"ID":"bc03633f-fb13-433b-a4a5-6cf688d6d60e","Type":"ContainerStarted","Data":"e9b2a46c9530bfb3f360cb787d014b157ffb1a36d8829b6072452410792ab1cd"} Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.020383 4757 scope.go:117] "RemoveContainer" 
containerID="d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609" Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.032711 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5ctws" podStartSLOduration=2.522138565 podStartE2EDuration="7.032687788s" podCreationTimestamp="2025-10-06 14:23:58 +0000 UTC" firstStartedPulling="2025-10-06 14:23:59.902429246 +0000 UTC m=+2728.399747793" lastFinishedPulling="2025-10-06 14:24:04.412978479 +0000 UTC m=+2732.910297016" observedRunningTime="2025-10-06 14:24:05.02269729 +0000 UTC m=+2733.520015847" watchObservedRunningTime="2025-10-06 14:24:05.032687788 +0000 UTC m=+2733.530006335" Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.042427 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d2gpb"] Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.045893 4757 scope.go:117] "RemoveContainer" containerID="6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54" Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.050954 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d2gpb"] Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.079978 4757 scope.go:117] "RemoveContainer" containerID="026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9" Oct 06 14:24:05 crc kubenswrapper[4757]: E1006 14:24:05.080472 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9\": container with ID starting with 026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9 not found: ID does not exist" containerID="026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9" Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.080513 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9"} err="failed to get container status \"026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9\": rpc error: code = NotFound desc = could not find container \"026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9\": container with ID starting with 026a029c5902c47f5938fc3b07ad97d211eac7cd75e092321ae4a2f20e86b4c9 not found: ID does not exist" Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.080539 4757 scope.go:117] "RemoveContainer" containerID="d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609" Oct 06 14:24:05 crc kubenswrapper[4757]: E1006 14:24:05.080920 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609\": container with ID starting with d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609 not found: ID does not exist" containerID="d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609" Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.080946 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609"} err="failed to get container status \"d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609\": rpc error: code = NotFound desc = could not find container 
\"d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609\": container with ID starting with d07e196e6329e9116efa4fb975598dd9091071d479d3737c6ad43403918b0609 not found: ID does not exist" Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.080966 4757 scope.go:117] "RemoveContainer" containerID="6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54" Oct 06 14:24:05 crc kubenswrapper[4757]: E1006 14:24:05.081290 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54\": container with ID starting with 6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54 not found: ID does not exist" containerID="6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54" Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.081316 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54"} err="failed to get container status \"6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54\": rpc error: code = NotFound desc = could not find container \"6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54\": container with ID starting with 6a6b4b019cc3fff6a9dc189915099a307260277dac0877c8aecffdb1b21e3a54 not found: ID does not exist" Oct 06 14:24:05 crc kubenswrapper[4757]: I1006 14:24:05.081351 4757 scope.go:117] "RemoveContainer" containerID="8e7fc26afed7cf734cb73a10dced2cf51322263c35999336621028eb82b97c97" Oct 06 14:24:06 crc kubenswrapper[4757]: I1006 14:24:06.188942 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c992ff8-9e2c-4a19-bf89-89669bd42152" path="/var/lib/kubelet/pods/3c992ff8-9e2c-4a19-bf89-89669bd42152/volumes" Oct 06 14:24:08 crc kubenswrapper[4757]: I1006 14:24:08.739400 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:24:08 crc kubenswrapper[4757]: I1006 14:24:08.739818 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:24:08 crc kubenswrapper[4757]: I1006 14:24:08.807991 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:24:09 crc kubenswrapper[4757]: I1006 14:24:09.070468 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5ctws" Oct 06 14:24:10 crc kubenswrapper[4757]: I1006 14:24:10.028247 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5ctws"] Oct 06 14:24:10 crc kubenswrapper[4757]: I1006 14:24:10.203467 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lmdhq"] Oct 06 14:24:10 crc kubenswrapper[4757]: I1006 14:24:10.203746 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lmdhq" podUID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerName="registry-server" containerID="cri-o://395402b79fab0ba65d1706879e6453121eced8fc1d8d13f3f9f65f772582de0d" gracePeriod=2 Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.024501 4757 generic.go:334] "Generic (PLEG): container finished" podID="aee16345-631c-48f8-a83b-bd30a9c62c60" 
containerID="395402b79fab0ba65d1706879e6453121eced8fc1d8d13f3f9f65f772582de0d" exitCode=0 Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.024586 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmdhq" event={"ID":"aee16345-631c-48f8-a83b-bd30a9c62c60","Type":"ContainerDied","Data":"395402b79fab0ba65d1706879e6453121eced8fc1d8d13f3f9f65f772582de0d"} Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.084823 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.185854 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-utilities\") pod \"aee16345-631c-48f8-a83b-bd30a9c62c60\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.186003 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-catalog-content\") pod \"aee16345-631c-48f8-a83b-bd30a9c62c60\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.186083 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nq45n\" (UniqueName: \"kubernetes.io/projected/aee16345-631c-48f8-a83b-bd30a9c62c60-kube-api-access-nq45n\") pod \"aee16345-631c-48f8-a83b-bd30a9c62c60\" (UID: \"aee16345-631c-48f8-a83b-bd30a9c62c60\") " Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.186548 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-utilities" (OuterVolumeSpecName: "utilities") pod "aee16345-631c-48f8-a83b-bd30a9c62c60" (UID: "aee16345-631c-48f8-a83b-bd30a9c62c60"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.191449 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee16345-631c-48f8-a83b-bd30a9c62c60-kube-api-access-nq45n" (OuterVolumeSpecName: "kube-api-access-nq45n") pod "aee16345-631c-48f8-a83b-bd30a9c62c60" (UID: "aee16345-631c-48f8-a83b-bd30a9c62c60"). InnerVolumeSpecName "kube-api-access-nq45n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.233700 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aee16345-631c-48f8-a83b-bd30a9c62c60" (UID: "aee16345-631c-48f8-a83b-bd30a9c62c60"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.288191 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.288233 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nq45n\" (UniqueName: \"kubernetes.io/projected/aee16345-631c-48f8-a83b-bd30a9c62c60-kube-api-access-nq45n\") on node \"crc\" DevicePath \"\"" Oct 06 14:24:11 crc kubenswrapper[4757]: I1006 14:24:11.288248 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee16345-631c-48f8-a83b-bd30a9c62c60-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:24:12 crc kubenswrapper[4757]: I1006 14:24:12.047348 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lmdhq" event={"ID":"aee16345-631c-48f8-a83b-bd30a9c62c60","Type":"ContainerDied","Data":"8185f3bbd3cd54bcb64ad1907644ae49c0c8d3249cfd78824980d96055eb8f0f"} Oct 06 14:24:12 crc kubenswrapper[4757]: I1006 14:24:12.047415 4757 scope.go:117] "RemoveContainer" containerID="395402b79fab0ba65d1706879e6453121eced8fc1d8d13f3f9f65f772582de0d" Oct 06 14:24:12 crc kubenswrapper[4757]: I1006 14:24:12.047809 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lmdhq" Oct 06 14:24:12 crc kubenswrapper[4757]: I1006 14:24:12.070777 4757 scope.go:117] "RemoveContainer" containerID="7d553c669e0d9fbf220c8f8fe04a2c7d54b2be0c86205e9d1ac7d0d1c3a99602" Oct 06 14:24:12 crc kubenswrapper[4757]: I1006 14:24:12.086407 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lmdhq"] Oct 06 14:24:12 crc kubenswrapper[4757]: I1006 14:24:12.091760 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lmdhq"] Oct 06 14:24:12 crc kubenswrapper[4757]: I1006 14:24:12.105607 4757 scope.go:117] "RemoveContainer" containerID="2d2e97283821c4be4ac239d7aceb1642d11c3ca2a6c916e5463fc4c8e30a89ab" Oct 06 14:24:12 crc kubenswrapper[4757]: I1006 14:24:12.188865 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aee16345-631c-48f8-a83b-bd30a9c62c60" path="/var/lib/kubelet/pods/aee16345-631c-48f8-a83b-bd30a9c62c60/volumes" Oct 06 14:26:04 crc kubenswrapper[4757]: I1006 14:26:04.361287 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:26:04 crc kubenswrapper[4757]: I1006 14:26:04.362319 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:26:34 crc kubenswrapper[4757]: I1006 14:26:34.360970 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:26:34 crc kubenswrapper[4757]: I1006 14:26:34.361502 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:27:04 crc kubenswrapper[4757]: I1006 14:27:04.361572 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:27:04 crc kubenswrapper[4757]: I1006 14:27:04.362141 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:27:04 crc kubenswrapper[4757]: I1006 14:27:04.362189 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:27:04 crc kubenswrapper[4757]: I1006 14:27:04.362835 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:27:04 crc kubenswrapper[4757]: I1006 14:27:04.362905 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40" gracePeriod=600 Oct 06 14:27:04 crc kubenswrapper[4757]: E1006 14:27:04.488330 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:27:05 crc kubenswrapper[4757]: I1006 14:27:05.438406 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40" exitCode=0 Oct 06 14:27:05 crc kubenswrapper[4757]: I1006 14:27:05.438534 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"} Oct 06 14:27:05 crc kubenswrapper[4757]: I1006 14:27:05.438850 4757 scope.go:117] "RemoveContainer" containerID="0a0da14d5df97879a51ed20c7b64fac97bf680cd8013e0a132c0db3c8e231bd7" Oct 06 14:27:05 crc kubenswrapper[4757]: I1006 
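The "back-off 5m0s" errors that follow come from the kubelet's CrashLoopBackOff: each failed restart roughly doubles the wait before the next attempt, up to a cap, which is why the RemoveContainer/Error pairs below space out over time. A minimal sketch of that schedule, assuming the commonly documented defaults of a 10s initial delay, a 2x factor, and a 5m cap (assumed values, not read from this cluster):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed defaults: 10s initial back-off, doubling per failed
	// restart, capped at 5m ("back-off 5m0s" in the entries above).
	delay, maxDelay := 10*time.Second, 5*time.Minute
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("restart attempt %d: wait %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}

Once the cap is reached, every sync attempt fails fast with the same error until the back-off window expires, which matches the steady drumbeat of pod_workers.go:1301 entries below.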
Oct 06 14:27:05 crc kubenswrapper[4757]: E1006 14:27:05.441401 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:27:18 crc kubenswrapper[4757]: I1006 14:27:18.179953 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:27:18 crc kubenswrapper[4757]: E1006 14:27:18.180782 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:27:29 crc kubenswrapper[4757]: I1006 14:27:29.180335 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:27:29 crc kubenswrapper[4757]: E1006 14:27:29.180982 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:27:44 crc kubenswrapper[4757]: I1006 14:27:44.180988 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:27:44 crc kubenswrapper[4757]: E1006 14:27:44.181906 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:27:58 crc kubenswrapper[4757]: I1006 14:27:58.180312 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:27:58 crc kubenswrapper[4757]: E1006 14:27:58.181481 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:28:11 crc kubenswrapper[4757]: I1006 14:28:11.180075 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:28:11 crc kubenswrapper[4757]: E1006 14:28:11.181676 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:28:26 crc kubenswrapper[4757]: I1006 14:28:26.179706 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:28:26 crc kubenswrapper[4757]: E1006 14:28:26.181677 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:28:38 crc kubenswrapper[4757]: I1006 14:28:38.179759 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:28:38 crc kubenswrapper[4757]: E1006 14:28:38.180658 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:28:49 crc kubenswrapper[4757]: I1006 14:28:49.179570 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:28:49 crc kubenswrapper[4757]: E1006 14:28:49.180321 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:29:00 crc kubenswrapper[4757]: I1006 14:29:00.180597 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:29:00 crc kubenswrapper[4757]: E1006 14:29:00.181917 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:29:14 crc kubenswrapper[4757]: I1006 14:29:14.180298 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:29:14 crc kubenswrapper[4757]: E1006 14:29:14.181882 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:29:25 crc kubenswrapper[4757]: I1006 14:29:25.180528 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:29:25 crc kubenswrapper[4757]: E1006 14:29:25.181455 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:29:40 crc kubenswrapper[4757]: I1006 14:29:40.179403 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:29:40 crc kubenswrapper[4757]: E1006 14:29:40.180248 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:29:51 crc kubenswrapper[4757]: I1006 14:29:51.179975 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:29:51 crc kubenswrapper[4757]: E1006 14:29:51.180756 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.170077 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"]
Oct 06 14:30:00 crc kubenswrapper[4757]: E1006 14:30:00.170926 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerName="extract-utilities"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.170941 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerName="extract-utilities"
Oct 06 14:30:00 crc kubenswrapper[4757]: E1006 14:30:00.170953 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerName="extract-content"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.170959 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerName="extract-content"
Oct 06 14:30:00 crc kubenswrapper[4757]: E1006 14:30:00.170970 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerName="extract-utilities"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.170978 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerName="extract-utilities"
Oct 06 14:30:00 crc kubenswrapper[4757]: E1006 14:30:00.170989 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerName="extract-content"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.170996 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerName="extract-content"
Oct 06 14:30:00 crc kubenswrapper[4757]: E1006 14:30:00.171011 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerName="registry-server"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.171017 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerName="registry-server"
Oct 06 14:30:00 crc kubenswrapper[4757]: E1006 14:30:00.171032 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerName="registry-server"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.171039 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerName="registry-server"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.171244 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c992ff8-9e2c-4a19-bf89-89669bd42152" containerName="registry-server"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.171270 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="aee16345-631c-48f8-a83b-bd30a9c62c60" containerName="registry-server"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.171858 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.174078 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.174638 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.178120 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"]
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.263764 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kttr\" (UniqueName: \"kubernetes.io/projected/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-kube-api-access-2kttr\") pod \"collect-profiles-29329350-85dn6\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.263887 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-config-volume\") pod \"collect-profiles-29329350-85dn6\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.263960 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-secret-volume\") pod \"collect-profiles-29329350-85dn6\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.366443 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kttr\" (UniqueName: \"kubernetes.io/projected/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-kube-api-access-2kttr\") pod \"collect-profiles-29329350-85dn6\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.366831 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-config-volume\") pod \"collect-profiles-29329350-85dn6\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.366954 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-secret-volume\") pod \"collect-profiles-29329350-85dn6\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.367848 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-config-volume\") pod \"collect-profiles-29329350-85dn6\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.372715 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-secret-volume\") pod \"collect-profiles-29329350-85dn6\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.384043 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kttr\" (UniqueName: \"kubernetes.io/projected/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-kube-api-access-2kttr\") pod \"collect-profiles-29329350-85dn6\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.493865 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:00 crc kubenswrapper[4757]: I1006 14:30:00.894150 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"]
Oct 06 14:30:01 crc kubenswrapper[4757]: I1006 14:30:01.801907 4757 generic.go:334] "Generic (PLEG): container finished" podID="c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b" containerID="118ce4b8f05c7c8614498377bd6cc066ae817cb5df2570545d087f27510a9248" exitCode=0
Oct 06 14:30:01 crc kubenswrapper[4757]: I1006 14:30:01.802004 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6" event={"ID":"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b","Type":"ContainerDied","Data":"118ce4b8f05c7c8614498377bd6cc066ae817cb5df2570545d087f27510a9248"}
Oct 06 14:30:01 crc kubenswrapper[4757]: I1006 14:30:01.802502 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6" event={"ID":"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b","Type":"ContainerStarted","Data":"057c254aadd43a08918651f21e32e2931527439615da8861197e91fc34122ce1"}
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.170772 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.179803 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:30:03 crc kubenswrapper[4757]: E1006 14:30:03.180182 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.315031 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kttr\" (UniqueName: \"kubernetes.io/projected/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-kube-api-access-2kttr\") pod \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") "
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.315112 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-secret-volume\") pod \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") "
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.315209 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-config-volume\") pod \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\" (UID: \"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b\") "
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.316930 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-config-volume" (OuterVolumeSpecName: "config-volume") pod "c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b" (UID: "c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.322457 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b" (UID: "c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.324371 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-kube-api-access-2kttr" (OuterVolumeSpecName: "kube-api-access-2kttr") pod "c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b" (UID: "c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b"). InnerVolumeSpecName "kube-api-access-2kttr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.417241 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kttr\" (UniqueName: \"kubernetes.io/projected/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-kube-api-access-2kttr\") on node \"crc\" DevicePath \"\""
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.417283 4757 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.417295 4757 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b-config-volume\") on node \"crc\" DevicePath \"\""
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.823207 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6" event={"ID":"c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b","Type":"ContainerDied","Data":"057c254aadd43a08918651f21e32e2931527439615da8861197e91fc34122ce1"}
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.823261 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="057c254aadd43a08918651f21e32e2931527439615da8861197e91fc34122ce1"
Oct 06 14:30:03 crc kubenswrapper[4757]: I1006 14:30:03.823267 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"
Oct 06 14:30:04 crc kubenswrapper[4757]: I1006 14:30:04.256836 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj"]
Oct 06 14:30:04 crc kubenswrapper[4757]: I1006 14:30:04.263835 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329305-824dj"]
Oct 06 14:30:06 crc kubenswrapper[4757]: I1006 14:30:06.189202 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e38f0f63-c4cf-464b-8daa-28560496b8e3" path="/var/lib/kubelet/pods/e38f0f63-c4cf-464b-8daa-28560496b8e3/volumes"
Oct 06 14:30:17 crc kubenswrapper[4757]: I1006 14:30:17.180923 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:30:17 crc kubenswrapper[4757]: E1006 14:30:17.181826 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:30:31 crc kubenswrapper[4757]: I1006 14:30:31.179938 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:30:31 crc kubenswrapper[4757]: E1006 14:30:31.180891 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:30:36 crc kubenswrapper[4757]: I1006 14:30:36.523439 4757 scope.go:117] "RemoveContainer" containerID="7e8fd278f05575a50202632745aeea7ae16a3a20c85ec4e429878474f281cd15" Oct 06 14:30:42 crc kubenswrapper[4757]: I1006 14:30:42.185406 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40" Oct 06 14:30:42 crc kubenswrapper[4757]: E1006 14:30:42.186282 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:30:57 crc kubenswrapper[4757]: I1006 14:30:57.181376 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40" Oct 06 14:30:57 crc kubenswrapper[4757]: E1006 14:30:57.182770 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.318574 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rf4v4"] Oct 06 14:31:07 crc kubenswrapper[4757]: E1006 14:31:07.320619 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b" containerName="collect-profiles" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.320641 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b" containerName="collect-profiles" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.320894 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b" containerName="collect-profiles" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.322588 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.330894 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf4v4"] Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.449750 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-utilities\") pod \"redhat-marketplace-rf4v4\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") " pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.449822 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsldv\" (UniqueName: \"kubernetes.io/projected/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-kube-api-access-nsldv\") pod \"redhat-marketplace-rf4v4\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") " pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.449885 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-catalog-content\") pod \"redhat-marketplace-rf4v4\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") " pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.551104 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-utilities\") pod \"redhat-marketplace-rf4v4\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") " pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.551194 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsldv\" (UniqueName: \"kubernetes.io/projected/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-kube-api-access-nsldv\") pod \"redhat-marketplace-rf4v4\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") " pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.551227 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-catalog-content\") pod \"redhat-marketplace-rf4v4\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") " pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.551749 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-utilities\") pod \"redhat-marketplace-rf4v4\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") " pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.551793 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-catalog-content\") pod \"redhat-marketplace-rf4v4\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") " pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.571732 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nsldv\" (UniqueName: \"kubernetes.io/projected/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-kube-api-access-nsldv\") pod \"redhat-marketplace-rf4v4\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") " pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:07 crc kubenswrapper[4757]: I1006 14:31:07.654221 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rf4v4" Oct 06 14:31:08 crc kubenswrapper[4757]: I1006 14:31:08.083060 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf4v4"] Oct 06 14:31:08 crc kubenswrapper[4757]: I1006 14:31:08.293610 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf4v4" event={"ID":"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5","Type":"ContainerStarted","Data":"0562ea043b2c3114e3e79fb49c3520ae4940102c70237e129559541eccaac56f"} Oct 06 14:31:09 crc kubenswrapper[4757]: I1006 14:31:09.307490 4757 generic.go:334] "Generic (PLEG): container finished" podID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerID="ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803" exitCode=0 Oct 06 14:31:09 crc kubenswrapper[4757]: I1006 14:31:09.307760 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf4v4" event={"ID":"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5","Type":"ContainerDied","Data":"ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803"} Oct 06 14:31:09 crc kubenswrapper[4757]: I1006 14:31:09.311198 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 14:31:10 crc kubenswrapper[4757]: I1006 14:31:10.180201 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40" Oct 06 14:31:10 crc kubenswrapper[4757]: E1006 14:31:10.180479 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:31:11 crc kubenswrapper[4757]: I1006 14:31:11.325555 4757 generic.go:334] "Generic (PLEG): container finished" podID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerID="df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5" exitCode=0 Oct 06 14:31:11 crc kubenswrapper[4757]: I1006 14:31:11.325637 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf4v4" event={"ID":"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5","Type":"ContainerDied","Data":"df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5"} Oct 06 14:31:12 crc kubenswrapper[4757]: I1006 14:31:12.333665 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf4v4" event={"ID":"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5","Type":"ContainerStarted","Data":"f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9"} Oct 06 14:31:12 crc kubenswrapper[4757]: I1006 14:31:12.359429 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rf4v4" podStartSLOduration=2.879911872 podStartE2EDuration="5.359404623s" 
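The pod_startup_latency_tracker entry above carries enough timestamps to recompute its own durations: podStartE2EDuration is the gap from podCreationTimestamp to watchObservedRunningTime, and podStartSLOduration is that gap minus the image-pull window (firstStartedPulling to lastFinishedPulling). That relationship is inferred from the numbers in this entry, not from kubelet source; a small sketch reproducing the arithmetic with values copied from the log:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Go reference-time layout matching "2025-10-06 14:31:07 +0000 UTC".
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-06 14:31:07 +0000 UTC")
	running := parse("2025-10-06 14:31:12.359404623 +0000 UTC")
	pullStart := parse("2025-10-06 14:31:09.310877807 +0000 UTC")
	pullEnd := parse("2025-10-06 14:31:11.790370558 +0000 UTC")

	e2e := running.Sub(created)         // 5.359404623s = podStartE2EDuration
	slo := e2e - pullEnd.Sub(pullStart) // 2.879911872s = podStartSLOduration
	fmt.Println(e2e, slo)
}

Both computed values match the logged podStartE2EDuration and podStartSLOduration exactly, which is a useful sanity check when auditing startup latency from captures like this.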
Oct 06 14:31:17 crc kubenswrapper[4757]: I1006 14:31:17.654541 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rf4v4"
Oct 06 14:31:17 crc kubenswrapper[4757]: I1006 14:31:17.654968 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rf4v4"
Oct 06 14:31:17 crc kubenswrapper[4757]: I1006 14:31:17.700049 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rf4v4"
Oct 06 14:31:18 crc kubenswrapper[4757]: I1006 14:31:18.432086 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rf4v4"
Oct 06 14:31:18 crc kubenswrapper[4757]: I1006 14:31:18.487026 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf4v4"]
Oct 06 14:31:20 crc kubenswrapper[4757]: I1006 14:31:20.409556 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rf4v4" podUID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerName="registry-server" containerID="cri-o://f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9" gracePeriod=2
Oct 06 14:31:20 crc kubenswrapper[4757]: I1006 14:31:20.833060 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rf4v4"
Oct 06 14:31:20 crc kubenswrapper[4757]: I1006 14:31:20.942956 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsldv\" (UniqueName: \"kubernetes.io/projected/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-kube-api-access-nsldv\") pod \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") "
Oct 06 14:31:20 crc kubenswrapper[4757]: I1006 14:31:20.943037 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-utilities\") pod \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") "
Oct 06 14:31:20 crc kubenswrapper[4757]: I1006 14:31:20.943117 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-catalog-content\") pod \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\" (UID: \"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5\") "
Oct 06 14:31:20 crc kubenswrapper[4757]: I1006 14:31:20.943802 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-utilities" (OuterVolumeSpecName: "utilities") pod "420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" (UID: "420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:31:20 crc kubenswrapper[4757]: I1006 14:31:20.950316 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-kube-api-access-nsldv" (OuterVolumeSpecName: "kube-api-access-nsldv") pod "420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" (UID: "420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5"). InnerVolumeSpecName "kube-api-access-nsldv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:31:20 crc kubenswrapper[4757]: I1006 14:31:20.957466 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" (UID: "420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.044985 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsldv\" (UniqueName: \"kubernetes.io/projected/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-kube-api-access-nsldv\") on node \"crc\" DevicePath \"\""
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.045044 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-utilities\") on node \"crc\" DevicePath \"\""
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.045058 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.422071 4757 generic.go:334] "Generic (PLEG): container finished" podID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerID="f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9" exitCode=0
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.422142 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf4v4" event={"ID":"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5","Type":"ContainerDied","Data":"f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9"}
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.422262 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf4v4" event={"ID":"420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5","Type":"ContainerDied","Data":"0562ea043b2c3114e3e79fb49c3520ae4940102c70237e129559541eccaac56f"}
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.422291 4757 scope.go:117] "RemoveContainer" containerID="f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9"
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.422469 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rf4v4"
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.448846 4757 scope.go:117] "RemoveContainer" containerID="df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5"
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.457610 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf4v4"]
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.462605 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf4v4"]
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.480117 4757 scope.go:117] "RemoveContainer" containerID="ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803"
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.495051 4757 scope.go:117] "RemoveContainer" containerID="f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9"
Oct 06 14:31:21 crc kubenswrapper[4757]: E1006 14:31:21.495588 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9\": container with ID starting with f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9 not found: ID does not exist" containerID="f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9"
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.495627 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9"} err="failed to get container status \"f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9\": rpc error: code = NotFound desc = could not find container \"f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9\": container with ID starting with f1903a3701720e64c6ce10f166fafef09f45585bc199b9581473354ea4e4d7f9 not found: ID does not exist"
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.495654 4757 scope.go:117] "RemoveContainer" containerID="df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5"
Oct 06 14:31:21 crc kubenswrapper[4757]: E1006 14:31:21.496010 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5\": container with ID starting with df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5 not found: ID does not exist" containerID="df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5"
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.496029 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5"} err="failed to get container status \"df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5\": rpc error: code = NotFound desc = could not find container \"df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5\": container with ID starting with df81957d064be692ca0561fc42f1e754023e6ee5eb0e164cf8788c7c4eee11f5 not found: ID does not exist"
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.496043 4757 scope.go:117] "RemoveContainer" containerID="ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803"
Oct 06 14:31:21 crc kubenswrapper[4757]: E1006 14:31:21.496372 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803\": container with ID starting with ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803 not found: ID does not exist" containerID="ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803"
Oct 06 14:31:21 crc kubenswrapper[4757]: I1006 14:31:21.496431 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803"} err="failed to get container status \"ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803\": rpc error: code = NotFound desc = could not find container \"ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803\": container with ID starting with ef0fe706b8f67b18a5f341d8500725b4695ee0181db2ab93805b202fb1fc1803 not found: ID does not exist"
Oct 06 14:31:22 crc kubenswrapper[4757]: I1006 14:31:22.205436 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" path="/var/lib/kubelet/pods/420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5/volumes"
Oct 06 14:31:23 crc kubenswrapper[4757]: I1006 14:31:23.180725 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:31:23 crc kubenswrapper[4757]: E1006 14:31:23.181566 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:31:36 crc kubenswrapper[4757]: I1006 14:31:36.180493 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:31:36 crc kubenswrapper[4757]: E1006 14:31:36.182606 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:31:47 crc kubenswrapper[4757]: I1006 14:31:47.180537 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40"
Oct 06 14:31:47 crc kubenswrapper[4757]: E1006 14:31:47.181282 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.373875 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fg7pm"]
Oct 06 14:31:48 crc kubenswrapper[4757]: E1006 14:31:48.374476 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerName="extract-content"
containerName="extract-content" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.374488 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerName="extract-content" Oct 06 14:31:48 crc kubenswrapper[4757]: E1006 14:31:48.374505 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerName="extract-utilities" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.374512 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerName="extract-utilities" Oct 06 14:31:48 crc kubenswrapper[4757]: E1006 14:31:48.374526 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerName="registry-server" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.374532 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerName="registry-server" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.374685 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="420c20b5-1fb0-4dea-8f2e-f5a8b5df3ad5" containerName="registry-server" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.375886 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.386361 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fg7pm"] Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.532712 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-utilities\") pod \"redhat-operators-fg7pm\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.532792 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-catalog-content\") pod \"redhat-operators-fg7pm\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.532859 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhj54\" (UniqueName: \"kubernetes.io/projected/556682c7-4e9f-4c67-a636-edd244a30638-kube-api-access-dhj54\") pod \"redhat-operators-fg7pm\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.633777 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-catalog-content\") pod \"redhat-operators-fg7pm\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.633865 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhj54\" (UniqueName: \"kubernetes.io/projected/556682c7-4e9f-4c67-a636-edd244a30638-kube-api-access-dhj54\") pod \"redhat-operators-fg7pm\" (UID: 
\"556682c7-4e9f-4c67-a636-edd244a30638\") " pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.633897 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-utilities\") pod \"redhat-operators-fg7pm\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.634380 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-catalog-content\") pod \"redhat-operators-fg7pm\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.634396 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-utilities\") pod \"redhat-operators-fg7pm\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.653856 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhj54\" (UniqueName: \"kubernetes.io/projected/556682c7-4e9f-4c67-a636-edd244a30638-kube-api-access-dhj54\") pod \"redhat-operators-fg7pm\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:48 crc kubenswrapper[4757]: I1006 14:31:48.709386 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:49 crc kubenswrapper[4757]: I1006 14:31:49.153518 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fg7pm"] Oct 06 14:31:49 crc kubenswrapper[4757]: I1006 14:31:49.626830 4757 generic.go:334] "Generic (PLEG): container finished" podID="556682c7-4e9f-4c67-a636-edd244a30638" containerID="ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01" exitCode=0 Oct 06 14:31:49 crc kubenswrapper[4757]: I1006 14:31:49.626901 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg7pm" event={"ID":"556682c7-4e9f-4c67-a636-edd244a30638","Type":"ContainerDied","Data":"ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01"} Oct 06 14:31:49 crc kubenswrapper[4757]: I1006 14:31:49.626945 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg7pm" event={"ID":"556682c7-4e9f-4c67-a636-edd244a30638","Type":"ContainerStarted","Data":"18f9bef5f5a253f57538c27ba0cc944737dfb3e4fb927ec0474e0054257e54f8"} Oct 06 14:31:50 crc kubenswrapper[4757]: I1006 14:31:50.634859 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg7pm" event={"ID":"556682c7-4e9f-4c67-a636-edd244a30638","Type":"ContainerStarted","Data":"d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8"} Oct 06 14:31:51 crc kubenswrapper[4757]: I1006 14:31:51.642364 4757 generic.go:334] "Generic (PLEG): container finished" podID="556682c7-4e9f-4c67-a636-edd244a30638" containerID="d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8" exitCode=0 Oct 06 14:31:51 crc kubenswrapper[4757]: I1006 14:31:51.642408 4757 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg7pm" event={"ID":"556682c7-4e9f-4c67-a636-edd244a30638","Type":"ContainerDied","Data":"d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8"} Oct 06 14:31:52 crc kubenswrapper[4757]: I1006 14:31:52.652560 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg7pm" event={"ID":"556682c7-4e9f-4c67-a636-edd244a30638","Type":"ContainerStarted","Data":"e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334"} Oct 06 14:31:52 crc kubenswrapper[4757]: I1006 14:31:52.671242 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fg7pm" podStartSLOduration=2.219327281 podStartE2EDuration="4.671224967s" podCreationTimestamp="2025-10-06 14:31:48 +0000 UTC" firstStartedPulling="2025-10-06 14:31:49.628518537 +0000 UTC m=+3198.125837074" lastFinishedPulling="2025-10-06 14:31:52.080416233 +0000 UTC m=+3200.577734760" observedRunningTime="2025-10-06 14:31:52.669124759 +0000 UTC m=+3201.166443316" watchObservedRunningTime="2025-10-06 14:31:52.671224967 +0000 UTC m=+3201.168543504" Oct 06 14:31:58 crc kubenswrapper[4757]: I1006 14:31:58.709682 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:58 crc kubenswrapper[4757]: I1006 14:31:58.710298 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:58 crc kubenswrapper[4757]: I1006 14:31:58.751817 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:59 crc kubenswrapper[4757]: I1006 14:31:59.760338 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:31:59 crc kubenswrapper[4757]: I1006 14:31:59.823673 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fg7pm"] Oct 06 14:32:01 crc kubenswrapper[4757]: I1006 14:32:01.179944 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40" Oct 06 14:32:01 crc kubenswrapper[4757]: E1006 14:32:01.180287 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:32:01 crc kubenswrapper[4757]: I1006 14:32:01.717448 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fg7pm" podUID="556682c7-4e9f-4c67-a636-edd244a30638" containerName="registry-server" containerID="cri-o://e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334" gracePeriod=2 Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.608020 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.726856 4757 generic.go:334] "Generic (PLEG): container finished" podID="556682c7-4e9f-4c67-a636-edd244a30638" containerID="e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334" exitCode=0 Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.726907 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg7pm" event={"ID":"556682c7-4e9f-4c67-a636-edd244a30638","Type":"ContainerDied","Data":"e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334"} Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.726933 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fg7pm" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.726957 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fg7pm" event={"ID":"556682c7-4e9f-4c67-a636-edd244a30638","Type":"ContainerDied","Data":"18f9bef5f5a253f57538c27ba0cc944737dfb3e4fb927ec0474e0054257e54f8"} Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.726979 4757 scope.go:117] "RemoveContainer" containerID="e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.734659 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-catalog-content\") pod \"556682c7-4e9f-4c67-a636-edd244a30638\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.734768 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-utilities\") pod \"556682c7-4e9f-4c67-a636-edd244a30638\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.734912 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhj54\" (UniqueName: \"kubernetes.io/projected/556682c7-4e9f-4c67-a636-edd244a30638-kube-api-access-dhj54\") pod \"556682c7-4e9f-4c67-a636-edd244a30638\" (UID: \"556682c7-4e9f-4c67-a636-edd244a30638\") " Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.735889 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-utilities" (OuterVolumeSpecName: "utilities") pod "556682c7-4e9f-4c67-a636-edd244a30638" (UID: "556682c7-4e9f-4c67-a636-edd244a30638"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.742294 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/556682c7-4e9f-4c67-a636-edd244a30638-kube-api-access-dhj54" (OuterVolumeSpecName: "kube-api-access-dhj54") pod "556682c7-4e9f-4c67-a636-edd244a30638" (UID: "556682c7-4e9f-4c67-a636-edd244a30638"). InnerVolumeSpecName "kube-api-access-dhj54". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.745237 4757 scope.go:117] "RemoveContainer" containerID="d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.776278 4757 scope.go:117] "RemoveContainer" containerID="ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.806584 4757 scope.go:117] "RemoveContainer" containerID="e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334" Oct 06 14:32:02 crc kubenswrapper[4757]: E1006 14:32:02.807030 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334\": container with ID starting with e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334 not found: ID does not exist" containerID="e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.807066 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334"} err="failed to get container status \"e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334\": rpc error: code = NotFound desc = could not find container \"e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334\": container with ID starting with e2e4a669a0c36f1bd5723ac6cf4662bd1291bf483c99ef1149c14d3d152e5334 not found: ID does not exist" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.807103 4757 scope.go:117] "RemoveContainer" containerID="d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8" Oct 06 14:32:02 crc kubenswrapper[4757]: E1006 14:32:02.807556 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8\": container with ID starting with d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8 not found: ID does not exist" containerID="d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.807598 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8"} err="failed to get container status \"d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8\": rpc error: code = NotFound desc = could not find container \"d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8\": container with ID starting with d72cc9cb4b07493426fd40f046871c510fbc0fd2e17128e857ea840c8b7294b8 not found: ID does not exist" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.807616 4757 scope.go:117] "RemoveContainer" containerID="ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01" Oct 06 14:32:02 crc kubenswrapper[4757]: E1006 14:32:02.807967 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01\": container with ID starting with ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01 not found: ID does not exist" containerID="ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01" Oct 06 14:32:02 crc 
kubenswrapper[4757]: I1006 14:32:02.807991 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01"} err="failed to get container status \"ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01\": rpc error: code = NotFound desc = could not find container \"ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01\": container with ID starting with ec4dc336b782c5db2c03491e826a5090fc2789a000b637c0368e26eaf635ca01 not found: ID does not exist" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.813117 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "556682c7-4e9f-4c67-a636-edd244a30638" (UID: "556682c7-4e9f-4c67-a636-edd244a30638"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.836353 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.836401 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/556682c7-4e9f-4c67-a636-edd244a30638-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:32:02 crc kubenswrapper[4757]: I1006 14:32:02.836413 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhj54\" (UniqueName: \"kubernetes.io/projected/556682c7-4e9f-4c67-a636-edd244a30638-kube-api-access-dhj54\") on node \"crc\" DevicePath \"\"" Oct 06 14:32:03 crc kubenswrapper[4757]: I1006 14:32:03.066592 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fg7pm"] Oct 06 14:32:03 crc kubenswrapper[4757]: I1006 14:32:03.071846 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fg7pm"] Oct 06 14:32:04 crc kubenswrapper[4757]: I1006 14:32:04.193239 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="556682c7-4e9f-4c67-a636-edd244a30638" path="/var/lib/kubelet/pods/556682c7-4e9f-4c67-a636-edd244a30638/volumes" Oct 06 14:32:15 crc kubenswrapper[4757]: I1006 14:32:15.180445 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40" Oct 06 14:32:15 crc kubenswrapper[4757]: I1006 14:32:15.825651 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"49fa04996e610edd6767f46cf25ec6ef339676efd38ffccc7f869a7cf92cfa5f"} Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.382667 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xgjf8"] Oct 06 14:34:00 crc kubenswrapper[4757]: E1006 14:34:00.388219 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556682c7-4e9f-4c67-a636-edd244a30638" containerName="extract-utilities" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.388277 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="556682c7-4e9f-4c67-a636-edd244a30638" containerName="extract-utilities" Oct 06 14:34:00 crc kubenswrapper[4757]: E1006 
14:34:00.388317 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556682c7-4e9f-4c67-a636-edd244a30638" containerName="registry-server" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.388327 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="556682c7-4e9f-4c67-a636-edd244a30638" containerName="registry-server" Oct 06 14:34:00 crc kubenswrapper[4757]: E1006 14:34:00.388349 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="556682c7-4e9f-4c67-a636-edd244a30638" containerName="extract-content" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.388356 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="556682c7-4e9f-4c67-a636-edd244a30638" containerName="extract-content" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.388584 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="556682c7-4e9f-4c67-a636-edd244a30638" containerName="registry-server" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.389900 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.396423 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xgjf8"] Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.499809 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-catalog-content\") pod \"community-operators-xgjf8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.499867 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk2cp\" (UniqueName: \"kubernetes.io/projected/f34d1ae5-7328-4578-ac79-7673976330d8-kube-api-access-zk2cp\") pod \"community-operators-xgjf8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.500015 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-utilities\") pod \"community-operators-xgjf8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.602351 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk2cp\" (UniqueName: \"kubernetes.io/projected/f34d1ae5-7328-4578-ac79-7673976330d8-kube-api-access-zk2cp\") pod \"community-operators-xgjf8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.602546 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-utilities\") pod \"community-operators-xgjf8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.602620 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-catalog-content\") pod \"community-operators-xgjf8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.603128 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-utilities\") pod \"community-operators-xgjf8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.604282 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-catalog-content\") pod \"community-operators-xgjf8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.624340 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk2cp\" (UniqueName: \"kubernetes.io/projected/f34d1ae5-7328-4578-ac79-7673976330d8-kube-api-access-zk2cp\") pod \"community-operators-xgjf8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:00 crc kubenswrapper[4757]: I1006 14:34:00.725068 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.185248 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xgjf8"] Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.668322 4757 generic.go:334] "Generic (PLEG): container finished" podID="f34d1ae5-7328-4578-ac79-7673976330d8" containerID="337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f" exitCode=0 Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.668390 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xgjf8" event={"ID":"f34d1ae5-7328-4578-ac79-7673976330d8","Type":"ContainerDied","Data":"337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f"} Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.668459 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xgjf8" event={"ID":"f34d1ae5-7328-4578-ac79-7673976330d8","Type":"ContainerStarted","Data":"327e7601e7cde4e7b5248032b5b34c7b4674966753411bb5917a672490e2a245"} Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.775495 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-267lp"] Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.777172 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.794200 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-267lp"] Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.923314 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-catalog-content\") pod \"certified-operators-267lp\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.923384 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsf2t\" (UniqueName: \"kubernetes.io/projected/0b37b00d-91de-402a-9df0-90e3f2c97ccb-kube-api-access-zsf2t\") pod \"certified-operators-267lp\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:01 crc kubenswrapper[4757]: I1006 14:34:01.923429 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-utilities\") pod \"certified-operators-267lp\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:02 crc kubenswrapper[4757]: I1006 14:34:02.024475 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsf2t\" (UniqueName: \"kubernetes.io/projected/0b37b00d-91de-402a-9df0-90e3f2c97ccb-kube-api-access-zsf2t\") pod \"certified-operators-267lp\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:02 crc kubenswrapper[4757]: I1006 14:34:02.024875 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-utilities\") pod \"certified-operators-267lp\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:02 crc kubenswrapper[4757]: I1006 14:34:02.024960 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-catalog-content\") pod \"certified-operators-267lp\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:02 crc kubenswrapper[4757]: I1006 14:34:02.025519 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-catalog-content\") pod \"certified-operators-267lp\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:02 crc kubenswrapper[4757]: I1006 14:34:02.025743 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-utilities\") pod \"certified-operators-267lp\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:02 crc kubenswrapper[4757]: I1006 14:34:02.054901 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zsf2t\" (UniqueName: \"kubernetes.io/projected/0b37b00d-91de-402a-9df0-90e3f2c97ccb-kube-api-access-zsf2t\") pod \"certified-operators-267lp\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:02 crc kubenswrapper[4757]: I1006 14:34:02.094863 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:02 crc kubenswrapper[4757]: I1006 14:34:02.586400 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-267lp"] Oct 06 14:34:02 crc kubenswrapper[4757]: W1006 14:34:02.592202 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b37b00d_91de_402a_9df0_90e3f2c97ccb.slice/crio-14d60969acf926beb086d168470a3c33c7d6b88ade31812397dc08c6e0279313 WatchSource:0}: Error finding container 14d60969acf926beb086d168470a3c33c7d6b88ade31812397dc08c6e0279313: Status 404 returned error can't find the container with id 14d60969acf926beb086d168470a3c33c7d6b88ade31812397dc08c6e0279313 Oct 06 14:34:02 crc kubenswrapper[4757]: I1006 14:34:02.682467 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-267lp" event={"ID":"0b37b00d-91de-402a-9df0-90e3f2c97ccb","Type":"ContainerStarted","Data":"14d60969acf926beb086d168470a3c33c7d6b88ade31812397dc08c6e0279313"} Oct 06 14:34:03 crc kubenswrapper[4757]: I1006 14:34:03.696226 4757 generic.go:334] "Generic (PLEG): container finished" podID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerID="31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2" exitCode=0 Oct 06 14:34:03 crc kubenswrapper[4757]: I1006 14:34:03.696379 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-267lp" event={"ID":"0b37b00d-91de-402a-9df0-90e3f2c97ccb","Type":"ContainerDied","Data":"31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2"} Oct 06 14:34:03 crc kubenswrapper[4757]: I1006 14:34:03.698547 4757 generic.go:334] "Generic (PLEG): container finished" podID="f34d1ae5-7328-4578-ac79-7673976330d8" containerID="5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b" exitCode=0 Oct 06 14:34:03 crc kubenswrapper[4757]: I1006 14:34:03.698584 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xgjf8" event={"ID":"f34d1ae5-7328-4578-ac79-7673976330d8","Type":"ContainerDied","Data":"5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b"} Oct 06 14:34:04 crc kubenswrapper[4757]: I1006 14:34:04.707712 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xgjf8" event={"ID":"f34d1ae5-7328-4578-ac79-7673976330d8","Type":"ContainerStarted","Data":"a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688"} Oct 06 14:34:04 crc kubenswrapper[4757]: I1006 14:34:04.725877 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xgjf8" podStartSLOduration=2.247093307 podStartE2EDuration="4.725855253s" podCreationTimestamp="2025-10-06 14:34:00 +0000 UTC" firstStartedPulling="2025-10-06 14:34:01.669894229 +0000 UTC m=+3330.167212776" lastFinishedPulling="2025-10-06 14:34:04.148656145 +0000 UTC m=+3332.645974722" observedRunningTime="2025-10-06 14:34:04.725397988 +0000 UTC 
m=+3333.222716525" watchObservedRunningTime="2025-10-06 14:34:04.725855253 +0000 UTC m=+3333.223173790" Oct 06 14:34:05 crc kubenswrapper[4757]: I1006 14:34:05.721520 4757 generic.go:334] "Generic (PLEG): container finished" podID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerID="1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293" exitCode=0 Oct 06 14:34:05 crc kubenswrapper[4757]: I1006 14:34:05.721626 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-267lp" event={"ID":"0b37b00d-91de-402a-9df0-90e3f2c97ccb","Type":"ContainerDied","Data":"1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293"} Oct 06 14:34:06 crc kubenswrapper[4757]: I1006 14:34:06.742333 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-267lp" event={"ID":"0b37b00d-91de-402a-9df0-90e3f2c97ccb","Type":"ContainerStarted","Data":"dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980"} Oct 06 14:34:06 crc kubenswrapper[4757]: I1006 14:34:06.766752 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-267lp" podStartSLOduration=3.367745312 podStartE2EDuration="5.766723887s" podCreationTimestamp="2025-10-06 14:34:01 +0000 UTC" firstStartedPulling="2025-10-06 14:34:03.698604662 +0000 UTC m=+3332.195923199" lastFinishedPulling="2025-10-06 14:34:06.097583237 +0000 UTC m=+3334.594901774" observedRunningTime="2025-10-06 14:34:06.764069762 +0000 UTC m=+3335.261388339" watchObservedRunningTime="2025-10-06 14:34:06.766723887 +0000 UTC m=+3335.264042464" Oct 06 14:34:10 crc kubenswrapper[4757]: I1006 14:34:10.725998 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:10 crc kubenswrapper[4757]: I1006 14:34:10.726481 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:10 crc kubenswrapper[4757]: I1006 14:34:10.785347 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:10 crc kubenswrapper[4757]: I1006 14:34:10.850593 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:11 crc kubenswrapper[4757]: I1006 14:34:11.023697 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xgjf8"] Oct 06 14:34:12 crc kubenswrapper[4757]: I1006 14:34:12.095508 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:12 crc kubenswrapper[4757]: I1006 14:34:12.095834 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:12 crc kubenswrapper[4757]: I1006 14:34:12.140505 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:12 crc kubenswrapper[4757]: I1006 14:34:12.790377 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xgjf8" podUID="f34d1ae5-7328-4578-ac79-7673976330d8" containerName="registry-server" containerID="cri-o://a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688" gracePeriod=2 Oct 06 14:34:12 crc 
kubenswrapper[4757]: I1006 14:34:12.865324 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.175605 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.286207 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-catalog-content\") pod \"f34d1ae5-7328-4578-ac79-7673976330d8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.286294 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-utilities\") pod \"f34d1ae5-7328-4578-ac79-7673976330d8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.286379 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk2cp\" (UniqueName: \"kubernetes.io/projected/f34d1ae5-7328-4578-ac79-7673976330d8-kube-api-access-zk2cp\") pod \"f34d1ae5-7328-4578-ac79-7673976330d8\" (UID: \"f34d1ae5-7328-4578-ac79-7673976330d8\") " Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.287242 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-utilities" (OuterVolumeSpecName: "utilities") pod "f34d1ae5-7328-4578-ac79-7673976330d8" (UID: "f34d1ae5-7328-4578-ac79-7673976330d8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.291232 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f34d1ae5-7328-4578-ac79-7673976330d8-kube-api-access-zk2cp" (OuterVolumeSpecName: "kube-api-access-zk2cp") pod "f34d1ae5-7328-4578-ac79-7673976330d8" (UID: "f34d1ae5-7328-4578-ac79-7673976330d8"). InnerVolumeSpecName "kube-api-access-zk2cp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.388516 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.388558 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk2cp\" (UniqueName: \"kubernetes.io/projected/f34d1ae5-7328-4578-ac79-7673976330d8-kube-api-access-zk2cp\") on node \"crc\" DevicePath \"\"" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.449057 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f34d1ae5-7328-4578-ac79-7673976330d8" (UID: "f34d1ae5-7328-4578-ac79-7673976330d8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.490228 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f34d1ae5-7328-4578-ac79-7673976330d8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.625459 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-267lp"] Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.798326 4757 generic.go:334] "Generic (PLEG): container finished" podID="f34d1ae5-7328-4578-ac79-7673976330d8" containerID="a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688" exitCode=0 Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.798379 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xgjf8" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.798423 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xgjf8" event={"ID":"f34d1ae5-7328-4578-ac79-7673976330d8","Type":"ContainerDied","Data":"a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688"} Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.798525 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xgjf8" event={"ID":"f34d1ae5-7328-4578-ac79-7673976330d8","Type":"ContainerDied","Data":"327e7601e7cde4e7b5248032b5b34c7b4674966753411bb5917a672490e2a245"} Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.798556 4757 scope.go:117] "RemoveContainer" containerID="a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.829167 4757 scope.go:117] "RemoveContainer" containerID="5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.835409 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xgjf8"] Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.844836 4757 scope.go:117] "RemoveContainer" containerID="337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.844926 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xgjf8"] Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.873754 4757 scope.go:117] "RemoveContainer" containerID="a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688" Oct 06 14:34:13 crc kubenswrapper[4757]: E1006 14:34:13.874281 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688\": container with ID starting with a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688 not found: ID does not exist" containerID="a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.874331 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688"} err="failed to get container status \"a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688\": rpc error: code = NotFound desc = could not find container 
\"a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688\": container with ID starting with a54577dd201bf78714f2d79d989b79bfcaac3083db1ad7e251ab29a0afcc6688 not found: ID does not exist" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.874363 4757 scope.go:117] "RemoveContainer" containerID="5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b" Oct 06 14:34:13 crc kubenswrapper[4757]: E1006 14:34:13.874613 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b\": container with ID starting with 5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b not found: ID does not exist" containerID="5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.874652 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b"} err="failed to get container status \"5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b\": rpc error: code = NotFound desc = could not find container \"5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b\": container with ID starting with 5bd70ce2fd6ce910ac53c7c8ef3f27685e29a50227b2029ecde0532f3801004b not found: ID does not exist" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.874679 4757 scope.go:117] "RemoveContainer" containerID="337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f" Oct 06 14:34:13 crc kubenswrapper[4757]: E1006 14:34:13.874916 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f\": container with ID starting with 337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f not found: ID does not exist" containerID="337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f" Oct 06 14:34:13 crc kubenswrapper[4757]: I1006 14:34:13.874953 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f"} err="failed to get container status \"337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f\": rpc error: code = NotFound desc = could not find container \"337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f\": container with ID starting with 337936231e994beb346310fb65d6802f59462b23093c6300e4663663b87ae16f not found: ID does not exist" Oct 06 14:34:14 crc kubenswrapper[4757]: I1006 14:34:14.194654 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f34d1ae5-7328-4578-ac79-7673976330d8" path="/var/lib/kubelet/pods/f34d1ae5-7328-4578-ac79-7673976330d8/volumes" Oct 06 14:34:14 crc kubenswrapper[4757]: I1006 14:34:14.810400 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-267lp" podUID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerName="registry-server" containerID="cri-o://dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980" gracePeriod=2 Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.246492 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.419310 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-utilities\") pod \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.419424 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsf2t\" (UniqueName: \"kubernetes.io/projected/0b37b00d-91de-402a-9df0-90e3f2c97ccb-kube-api-access-zsf2t\") pod \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.419521 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-catalog-content\") pod \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\" (UID: \"0b37b00d-91de-402a-9df0-90e3f2c97ccb\") " Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.420121 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-utilities" (OuterVolumeSpecName: "utilities") pod "0b37b00d-91de-402a-9df0-90e3f2c97ccb" (UID: "0b37b00d-91de-402a-9df0-90e3f2c97ccb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.425504 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b37b00d-91de-402a-9df0-90e3f2c97ccb-kube-api-access-zsf2t" (OuterVolumeSpecName: "kube-api-access-zsf2t") pod "0b37b00d-91de-402a-9df0-90e3f2c97ccb" (UID: "0b37b00d-91de-402a-9df0-90e3f2c97ccb"). InnerVolumeSpecName "kube-api-access-zsf2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.489050 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b37b00d-91de-402a-9df0-90e3f2c97ccb" (UID: "0b37b00d-91de-402a-9df0-90e3f2c97ccb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.520959 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.521006 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b37b00d-91de-402a-9df0-90e3f2c97ccb-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.521023 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsf2t\" (UniqueName: \"kubernetes.io/projected/0b37b00d-91de-402a-9df0-90e3f2c97ccb-kube-api-access-zsf2t\") on node \"crc\" DevicePath \"\"" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.821840 4757 generic.go:334] "Generic (PLEG): container finished" podID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerID="dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980" exitCode=0 Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.821906 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-267lp" event={"ID":"0b37b00d-91de-402a-9df0-90e3f2c97ccb","Type":"ContainerDied","Data":"dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980"} Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.821940 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-267lp" event={"ID":"0b37b00d-91de-402a-9df0-90e3f2c97ccb","Type":"ContainerDied","Data":"14d60969acf926beb086d168470a3c33c7d6b88ade31812397dc08c6e0279313"} Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.821966 4757 scope.go:117] "RemoveContainer" containerID="dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.821970 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-267lp" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.854784 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-267lp"] Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.857114 4757 scope.go:117] "RemoveContainer" containerID="1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.862280 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-267lp"] Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.894910 4757 scope.go:117] "RemoveContainer" containerID="31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.928110 4757 scope.go:117] "RemoveContainer" containerID="dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980" Oct 06 14:34:15 crc kubenswrapper[4757]: E1006 14:34:15.928948 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980\": container with ID starting with dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980 not found: ID does not exist" containerID="dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.928984 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980"} err="failed to get container status \"dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980\": rpc error: code = NotFound desc = could not find container \"dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980\": container with ID starting with dd52d9801f2a6caf296771c1cbf477f372539c0548b04380c195135b555ed980 not found: ID does not exist" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.929022 4757 scope.go:117] "RemoveContainer" containerID="1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293" Oct 06 14:34:15 crc kubenswrapper[4757]: E1006 14:34:15.929549 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293\": container with ID starting with 1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293 not found: ID does not exist" containerID="1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.929575 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293"} err="failed to get container status \"1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293\": rpc error: code = NotFound desc = could not find container \"1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293\": container with ID starting with 1816365e719c9fcf7c972d9a05bbbc55bf8e1997d4b840a9092edc05365b1293 not found: ID does not exist" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.929594 4757 scope.go:117] "RemoveContainer" containerID="31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2" Oct 06 14:34:15 crc kubenswrapper[4757]: E1006 14:34:15.929943 4757 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2\": container with ID starting with 31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2 not found: ID does not exist" containerID="31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2" Oct 06 14:34:15 crc kubenswrapper[4757]: I1006 14:34:15.929973 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2"} err="failed to get container status \"31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2\": rpc error: code = NotFound desc = could not find container \"31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2\": container with ID starting with 31ae2262fb469867381b79ebbfbfed2018028337a6ec56ba01418e8825f9f3a2 not found: ID does not exist" Oct 06 14:34:16 crc kubenswrapper[4757]: I1006 14:34:16.190526 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" path="/var/lib/kubelet/pods/0b37b00d-91de-402a-9df0-90e3f2c97ccb/volumes" Oct 06 14:34:34 crc kubenswrapper[4757]: I1006 14:34:34.361410 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:34:34 crc kubenswrapper[4757]: I1006 14:34:34.362040 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:35:04 crc kubenswrapper[4757]: I1006 14:35:04.360910 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:35:04 crc kubenswrapper[4757]: I1006 14:35:04.361640 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:35:34 crc kubenswrapper[4757]: I1006 14:35:34.361448 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:35:34 crc kubenswrapper[4757]: I1006 14:35:34.361924 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:35:34 crc kubenswrapper[4757]: I1006 14:35:34.361967 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:35:34 crc kubenswrapper[4757]: I1006 14:35:34.362406 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"49fa04996e610edd6767f46cf25ec6ef339676efd38ffccc7f869a7cf92cfa5f"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:35:34 crc kubenswrapper[4757]: I1006 14:35:34.362454 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://49fa04996e610edd6767f46cf25ec6ef339676efd38ffccc7f869a7cf92cfa5f" gracePeriod=600 Oct 06 14:35:35 crc kubenswrapper[4757]: I1006 14:35:35.431899 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="49fa04996e610edd6767f46cf25ec6ef339676efd38ffccc7f869a7cf92cfa5f" exitCode=0 Oct 06 14:35:35 crc kubenswrapper[4757]: I1006 14:35:35.431940 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"49fa04996e610edd6767f46cf25ec6ef339676efd38ffccc7f869a7cf92cfa5f"} Oct 06 14:35:35 crc kubenswrapper[4757]: I1006 14:35:35.432547 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065"} Oct 06 14:35:35 crc kubenswrapper[4757]: I1006 14:35:35.432565 4757 scope.go:117] "RemoveContainer" containerID="99be7ecd47883c550ee964aaf41717a2d5ba03ae000da549f5ab40bf9d61bd40" Oct 06 14:37:34 crc kubenswrapper[4757]: I1006 14:37:34.361118 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:37:34 crc kubenswrapper[4757]: I1006 14:37:34.361713 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:38:04 crc kubenswrapper[4757]: I1006 14:38:04.360722 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:38:04 crc kubenswrapper[4757]: I1006 14:38:04.361372 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:38:34 crc 
kubenswrapper[4757]: I1006 14:38:34.361556 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:38:34 crc kubenswrapper[4757]: I1006 14:38:34.362187 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:38:34 crc kubenswrapper[4757]: I1006 14:38:34.362239 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:38:34 crc kubenswrapper[4757]: I1006 14:38:34.362922 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:38:34 crc kubenswrapper[4757]: I1006 14:38:34.362996 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" gracePeriod=600 Oct 06 14:38:34 crc kubenswrapper[4757]: E1006 14:38:34.491042 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:38:34 crc kubenswrapper[4757]: I1006 14:38:34.840996 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" exitCode=0 Oct 06 14:38:34 crc kubenswrapper[4757]: I1006 14:38:34.841047 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065"} Oct 06 14:38:34 crc kubenswrapper[4757]: I1006 14:38:34.841207 4757 scope.go:117] "RemoveContainer" containerID="49fa04996e610edd6767f46cf25ec6ef339676efd38ffccc7f869a7cf92cfa5f" Oct 06 14:38:34 crc kubenswrapper[4757]: I1006 14:38:34.841986 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:38:34 crc kubenswrapper[4757]: E1006 14:38:34.842592 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:38:50 crc kubenswrapper[4757]: I1006 14:38:50.181064 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:38:50 crc kubenswrapper[4757]: E1006 14:38:50.182064 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:39:02 crc kubenswrapper[4757]: I1006 14:39:02.187562 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:39:02 crc kubenswrapper[4757]: E1006 14:39:02.188610 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:39:14 crc kubenswrapper[4757]: I1006 14:39:14.180314 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:39:14 crc kubenswrapper[4757]: E1006 14:39:14.182135 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:39:25 crc kubenswrapper[4757]: I1006 14:39:25.180398 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:39:25 crc kubenswrapper[4757]: E1006 14:39:25.181375 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:39:38 crc kubenswrapper[4757]: I1006 14:39:38.180631 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:39:38 crc kubenswrapper[4757]: E1006 14:39:38.181580 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" 
podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:39:49 crc kubenswrapper[4757]: I1006 14:39:49.180070 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:39:49 crc kubenswrapper[4757]: E1006 14:39:49.180900 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:40:02 crc kubenswrapper[4757]: I1006 14:40:02.186126 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:40:02 crc kubenswrapper[4757]: E1006 14:40:02.187048 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:40:13 crc kubenswrapper[4757]: I1006 14:40:13.180553 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:40:13 crc kubenswrapper[4757]: E1006 14:40:13.182879 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:40:26 crc kubenswrapper[4757]: I1006 14:40:26.179891 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:40:26 crc kubenswrapper[4757]: E1006 14:40:26.180649 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:40:41 crc kubenswrapper[4757]: I1006 14:40:41.180180 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:40:41 crc kubenswrapper[4757]: E1006 14:40:41.181229 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:40:52 crc kubenswrapper[4757]: I1006 14:40:52.186064 4757 scope.go:117] "RemoveContainer" 
containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:40:52 crc kubenswrapper[4757]: E1006 14:40:52.186959 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:41:04 crc kubenswrapper[4757]: I1006 14:41:04.185729 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:41:04 crc kubenswrapper[4757]: E1006 14:41:04.186437 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:41:18 crc kubenswrapper[4757]: I1006 14:41:18.179756 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:41:18 crc kubenswrapper[4757]: E1006 14:41:18.180533 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:41:32 crc kubenswrapper[4757]: I1006 14:41:32.188818 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:41:32 crc kubenswrapper[4757]: E1006 14:41:32.190033 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:41:45 crc kubenswrapper[4757]: I1006 14:41:45.180266 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:41:45 crc kubenswrapper[4757]: E1006 14:41:45.181133 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:41:59 crc kubenswrapper[4757]: I1006 14:41:59.180985 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:41:59 crc kubenswrapper[4757]: E1006 14:41:59.181808 4757 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.940567 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5jlxq"] Oct 06 14:42:06 crc kubenswrapper[4757]: E1006 14:42:06.941398 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerName="registry-server" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.941412 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerName="registry-server" Oct 06 14:42:06 crc kubenswrapper[4757]: E1006 14:42:06.941432 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerName="extract-utilities" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.941441 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerName="extract-utilities" Oct 06 14:42:06 crc kubenswrapper[4757]: E1006 14:42:06.941459 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f34d1ae5-7328-4578-ac79-7673976330d8" containerName="registry-server" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.941467 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f34d1ae5-7328-4578-ac79-7673976330d8" containerName="registry-server" Oct 06 14:42:06 crc kubenswrapper[4757]: E1006 14:42:06.941487 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f34d1ae5-7328-4578-ac79-7673976330d8" containerName="extract-content" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.941495 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f34d1ae5-7328-4578-ac79-7673976330d8" containerName="extract-content" Oct 06 14:42:06 crc kubenswrapper[4757]: E1006 14:42:06.941512 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f34d1ae5-7328-4578-ac79-7673976330d8" containerName="extract-utilities" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.941520 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f34d1ae5-7328-4578-ac79-7673976330d8" containerName="extract-utilities" Oct 06 14:42:06 crc kubenswrapper[4757]: E1006 14:42:06.941542 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerName="extract-content" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.941550 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerName="extract-content" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.941724 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b37b00d-91de-402a-9df0-90e3f2c97ccb" containerName="registry-server" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.941747 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f34d1ae5-7328-4578-ac79-7673976330d8" containerName="registry-server" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.943016 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:06 crc kubenswrapper[4757]: I1006 14:42:06.973670 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5jlxq"] Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.108043 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-catalog-content\") pod \"redhat-marketplace-5jlxq\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.108090 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-utilities\") pod \"redhat-marketplace-5jlxq\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.108205 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzl5j\" (UniqueName: \"kubernetes.io/projected/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-kube-api-access-pzl5j\") pod \"redhat-marketplace-5jlxq\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.209594 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzl5j\" (UniqueName: \"kubernetes.io/projected/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-kube-api-access-pzl5j\") pod \"redhat-marketplace-5jlxq\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.209696 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-catalog-content\") pod \"redhat-marketplace-5jlxq\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.209726 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-utilities\") pod \"redhat-marketplace-5jlxq\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.210175 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-catalog-content\") pod \"redhat-marketplace-5jlxq\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.210424 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-utilities\") pod \"redhat-marketplace-5jlxq\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.232997 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-pzl5j\" (UniqueName: \"kubernetes.io/projected/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-kube-api-access-pzl5j\") pod \"redhat-marketplace-5jlxq\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.275391 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:07 crc kubenswrapper[4757]: I1006 14:42:07.766324 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5jlxq"] Oct 06 14:42:08 crc kubenswrapper[4757]: I1006 14:42:08.788334 4757 generic.go:334] "Generic (PLEG): container finished" podID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerID="f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7" exitCode=0 Oct 06 14:42:08 crc kubenswrapper[4757]: I1006 14:42:08.788411 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5jlxq" event={"ID":"3e53fd88-4b6b-42ad-9c45-368a0346fe8e","Type":"ContainerDied","Data":"f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7"} Oct 06 14:42:08 crc kubenswrapper[4757]: I1006 14:42:08.788458 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5jlxq" event={"ID":"3e53fd88-4b6b-42ad-9c45-368a0346fe8e","Type":"ContainerStarted","Data":"85c08cee2070efbe93c670e3366a41a6dd188c612212aafd501935143ea703b7"} Oct 06 14:42:08 crc kubenswrapper[4757]: I1006 14:42:08.791391 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 14:42:09 crc kubenswrapper[4757]: I1006 14:42:09.800889 4757 generic.go:334] "Generic (PLEG): container finished" podID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerID="3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae" exitCode=0 Oct 06 14:42:09 crc kubenswrapper[4757]: I1006 14:42:09.801041 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5jlxq" event={"ID":"3e53fd88-4b6b-42ad-9c45-368a0346fe8e","Type":"ContainerDied","Data":"3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae"} Oct 06 14:42:10 crc kubenswrapper[4757]: I1006 14:42:10.814924 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5jlxq" event={"ID":"3e53fd88-4b6b-42ad-9c45-368a0346fe8e","Type":"ContainerStarted","Data":"9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808"} Oct 06 14:42:10 crc kubenswrapper[4757]: I1006 14:42:10.842190 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5jlxq" podStartSLOduration=3.415427965 podStartE2EDuration="4.842163923s" podCreationTimestamp="2025-10-06 14:42:06 +0000 UTC" firstStartedPulling="2025-10-06 14:42:08.790503787 +0000 UTC m=+3817.287822364" lastFinishedPulling="2025-10-06 14:42:10.217239745 +0000 UTC m=+3818.714558322" observedRunningTime="2025-10-06 14:42:10.834692392 +0000 UTC m=+3819.332010979" watchObservedRunningTime="2025-10-06 14:42:10.842163923 +0000 UTC m=+3819.339482470" Oct 06 14:42:14 crc kubenswrapper[4757]: I1006 14:42:14.181254 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:42:14 crc kubenswrapper[4757]: E1006 14:42:14.181991 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:42:17 crc kubenswrapper[4757]: I1006 14:42:17.276214 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:17 crc kubenswrapper[4757]: I1006 14:42:17.276663 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:17 crc kubenswrapper[4757]: I1006 14:42:17.354153 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:17 crc kubenswrapper[4757]: I1006 14:42:17.958163 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:18 crc kubenswrapper[4757]: I1006 14:42:18.024829 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5jlxq"] Oct 06 14:42:19 crc kubenswrapper[4757]: I1006 14:42:19.900934 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5jlxq" podUID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerName="registry-server" containerID="cri-o://9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808" gracePeriod=2 Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.403773 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.555057 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzl5j\" (UniqueName: \"kubernetes.io/projected/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-kube-api-access-pzl5j\") pod \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.555382 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-catalog-content\") pod \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.556945 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-utilities\") pod \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\" (UID: \"3e53fd88-4b6b-42ad-9c45-368a0346fe8e\") " Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.558085 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-utilities" (OuterVolumeSpecName: "utilities") pod "3e53fd88-4b6b-42ad-9c45-368a0346fe8e" (UID: "3e53fd88-4b6b-42ad-9c45-368a0346fe8e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.578146 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3e53fd88-4b6b-42ad-9c45-368a0346fe8e" (UID: "3e53fd88-4b6b-42ad-9c45-368a0346fe8e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.659346 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.659730 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.914805 4757 generic.go:334] "Generic (PLEG): container finished" podID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerID="9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808" exitCode=0 Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.914870 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5jlxq" event={"ID":"3e53fd88-4b6b-42ad-9c45-368a0346fe8e","Type":"ContainerDied","Data":"9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808"} Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.914930 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5jlxq" event={"ID":"3e53fd88-4b6b-42ad-9c45-368a0346fe8e","Type":"ContainerDied","Data":"85c08cee2070efbe93c670e3366a41a6dd188c612212aafd501935143ea703b7"} Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.914965 4757 scope.go:117] "RemoveContainer" containerID="9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.916362 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5jlxq" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.943189 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-kube-api-access-pzl5j" (OuterVolumeSpecName: "kube-api-access-pzl5j") pod "3e53fd88-4b6b-42ad-9c45-368a0346fe8e" (UID: "3e53fd88-4b6b-42ad-9c45-368a0346fe8e"). InnerVolumeSpecName "kube-api-access-pzl5j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.946056 4757 scope.go:117] "RemoveContainer" containerID="3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.964217 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzl5j\" (UniqueName: \"kubernetes.io/projected/3e53fd88-4b6b-42ad-9c45-368a0346fe8e-kube-api-access-pzl5j\") on node \"crc\" DevicePath \"\"" Oct 06 14:42:20 crc kubenswrapper[4757]: I1006 14:42:20.993658 4757 scope.go:117] "RemoveContainer" containerID="f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7" Oct 06 14:42:21 crc kubenswrapper[4757]: I1006 14:42:21.030572 4757 scope.go:117] "RemoveContainer" containerID="9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808" Oct 06 14:42:21 crc kubenswrapper[4757]: E1006 14:42:21.031051 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808\": container with ID starting with 9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808 not found: ID does not exist" containerID="9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808" Oct 06 14:42:21 crc kubenswrapper[4757]: I1006 14:42:21.031122 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808"} err="failed to get container status \"9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808\": rpc error: code = NotFound desc = could not find container \"9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808\": container with ID starting with 9361b1e1528928bcd4429ba3262f7724da9de74df6d60c2ac3008e7e2819b808 not found: ID does not exist" Oct 06 14:42:21 crc kubenswrapper[4757]: I1006 14:42:21.031171 4757 scope.go:117] "RemoveContainer" containerID="3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae" Oct 06 14:42:21 crc kubenswrapper[4757]: E1006 14:42:21.031581 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae\": container with ID starting with 3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae not found: ID does not exist" containerID="3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae" Oct 06 14:42:21 crc kubenswrapper[4757]: I1006 14:42:21.031626 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae"} err="failed to get container status \"3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae\": rpc error: code = NotFound desc = could not find container \"3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae\": container with ID starting with 3144dc342cc3ac6c8f0b1e8b9fce215c45fc1cbffa2c3f15d82a935aa3d479ae not found: ID does not exist" Oct 06 14:42:21 crc kubenswrapper[4757]: I1006 14:42:21.031653 4757 scope.go:117] "RemoveContainer" containerID="f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7" Oct 06 14:42:21 crc kubenswrapper[4757]: E1006 14:42:21.031970 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7\": container with ID starting with f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7 not found: ID does not exist" containerID="f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7" Oct 06 14:42:21 crc kubenswrapper[4757]: I1006 14:42:21.031999 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7"} err="failed to get container status \"f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7\": rpc error: code = NotFound desc = could not find container \"f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7\": container with ID starting with f66ee12ae764eb1edaa8a5ce86e7dada5270abd63e4d8e4ee80392551a7880f7 not found: ID does not exist" Oct 06 14:42:21 crc kubenswrapper[4757]: I1006 14:42:21.252416 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5jlxq"] Oct 06 14:42:21 crc kubenswrapper[4757]: I1006 14:42:21.256921 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5jlxq"] Oct 06 14:42:22 crc kubenswrapper[4757]: I1006 14:42:22.195625 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" path="/var/lib/kubelet/pods/3e53fd88-4b6b-42ad-9c45-368a0346fe8e/volumes" Oct 06 14:42:29 crc kubenswrapper[4757]: I1006 14:42:29.180215 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:42:29 crc kubenswrapper[4757]: E1006 14:42:29.181515 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:42:42 crc kubenswrapper[4757]: I1006 14:42:42.183050 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:42:42 crc kubenswrapper[4757]: E1006 14:42:42.184846 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:42:53 crc kubenswrapper[4757]: I1006 14:42:53.180581 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:42:53 crc kubenswrapper[4757]: E1006 14:42:53.181503 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.725764 4757 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r86lf"] Oct 06 14:43:06 crc kubenswrapper[4757]: E1006 14:43:06.726878 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerName="extract-utilities" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.726899 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerName="extract-utilities" Oct 06 14:43:06 crc kubenswrapper[4757]: E1006 14:43:06.726916 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerName="extract-content" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.726924 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerName="extract-content" Oct 06 14:43:06 crc kubenswrapper[4757]: E1006 14:43:06.726957 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerName="registry-server" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.726967 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerName="registry-server" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.727338 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e53fd88-4b6b-42ad-9c45-368a0346fe8e" containerName="registry-server" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.729272 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.785495 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r86lf"] Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.801526 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhpsr\" (UniqueName: \"kubernetes.io/projected/3400d609-071b-4613-bb2f-cfc75f272968-kube-api-access-mhpsr\") pod \"redhat-operators-r86lf\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.801602 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-utilities\") pod \"redhat-operators-r86lf\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.801648 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-catalog-content\") pod \"redhat-operators-r86lf\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.902757 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-utilities\") pod \"redhat-operators-r86lf\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.902817 4757 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-catalog-content\") pod \"redhat-operators-r86lf\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.902941 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhpsr\" (UniqueName: \"kubernetes.io/projected/3400d609-071b-4613-bb2f-cfc75f272968-kube-api-access-mhpsr\") pod \"redhat-operators-r86lf\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.904097 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-utilities\") pod \"redhat-operators-r86lf\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.904339 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-catalog-content\") pod \"redhat-operators-r86lf\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:06 crc kubenswrapper[4757]: I1006 14:43:06.923507 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhpsr\" (UniqueName: \"kubernetes.io/projected/3400d609-071b-4613-bb2f-cfc75f272968-kube-api-access-mhpsr\") pod \"redhat-operators-r86lf\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:07 crc kubenswrapper[4757]: I1006 14:43:07.093572 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:07 crc kubenswrapper[4757]: I1006 14:43:07.542402 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r86lf"] Oct 06 14:43:08 crc kubenswrapper[4757]: I1006 14:43:08.181153 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:43:08 crc kubenswrapper[4757]: E1006 14:43:08.181540 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:43:08 crc kubenswrapper[4757]: I1006 14:43:08.337138 4757 generic.go:334] "Generic (PLEG): container finished" podID="3400d609-071b-4613-bb2f-cfc75f272968" containerID="4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd" exitCode=0 Oct 06 14:43:08 crc kubenswrapper[4757]: I1006 14:43:08.337190 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r86lf" event={"ID":"3400d609-071b-4613-bb2f-cfc75f272968","Type":"ContainerDied","Data":"4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd"} Oct 06 14:43:08 crc kubenswrapper[4757]: I1006 14:43:08.337222 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r86lf" event={"ID":"3400d609-071b-4613-bb2f-cfc75f272968","Type":"ContainerStarted","Data":"3f1d0502d2ac07afe9b307fba70971da578957a06444bfaace3ef80eb2b4fe6d"} Oct 06 14:43:10 crc kubenswrapper[4757]: I1006 14:43:10.359646 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r86lf" event={"ID":"3400d609-071b-4613-bb2f-cfc75f272968","Type":"ContainerStarted","Data":"c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430"} Oct 06 14:43:11 crc kubenswrapper[4757]: I1006 14:43:11.378246 4757 generic.go:334] "Generic (PLEG): container finished" podID="3400d609-071b-4613-bb2f-cfc75f272968" containerID="c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430" exitCode=0 Oct 06 14:43:11 crc kubenswrapper[4757]: I1006 14:43:11.378304 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r86lf" event={"ID":"3400d609-071b-4613-bb2f-cfc75f272968","Type":"ContainerDied","Data":"c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430"} Oct 06 14:43:12 crc kubenswrapper[4757]: I1006 14:43:12.388223 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r86lf" event={"ID":"3400d609-071b-4613-bb2f-cfc75f272968","Type":"ContainerStarted","Data":"78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057"} Oct 06 14:43:12 crc kubenswrapper[4757]: I1006 14:43:12.403276 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r86lf" podStartSLOduration=2.798988852 podStartE2EDuration="6.403257995s" podCreationTimestamp="2025-10-06 14:43:06 +0000 UTC" firstStartedPulling="2025-10-06 14:43:08.340338239 +0000 UTC m=+3876.837656776" lastFinishedPulling="2025-10-06 14:43:11.944607372 +0000 UTC m=+3880.441925919" observedRunningTime="2025-10-06 14:43:12.402586404 +0000 
UTC m=+3880.899904991" watchObservedRunningTime="2025-10-06 14:43:12.403257995 +0000 UTC m=+3880.900576532" Oct 06 14:43:17 crc kubenswrapper[4757]: I1006 14:43:17.093935 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:17 crc kubenswrapper[4757]: I1006 14:43:17.094290 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:18 crc kubenswrapper[4757]: I1006 14:43:18.175152 4757 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r86lf" podUID="3400d609-071b-4613-bb2f-cfc75f272968" containerName="registry-server" probeResult="failure" output=< Oct 06 14:43:18 crc kubenswrapper[4757]: timeout: failed to connect service ":50051" within 1s Oct 06 14:43:18 crc kubenswrapper[4757]: > Oct 06 14:43:19 crc kubenswrapper[4757]: I1006 14:43:19.181025 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:43:19 crc kubenswrapper[4757]: E1006 14:43:19.181392 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:43:27 crc kubenswrapper[4757]: I1006 14:43:27.168387 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:27 crc kubenswrapper[4757]: I1006 14:43:27.239320 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:27 crc kubenswrapper[4757]: I1006 14:43:27.416698 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r86lf"] Oct 06 14:43:28 crc kubenswrapper[4757]: I1006 14:43:28.532227 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r86lf" podUID="3400d609-071b-4613-bb2f-cfc75f272968" containerName="registry-server" containerID="cri-o://78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057" gracePeriod=2 Oct 06 14:43:28 crc kubenswrapper[4757]: I1006 14:43:28.958485 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.150449 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-catalog-content\") pod \"3400d609-071b-4613-bb2f-cfc75f272968\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.150581 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhpsr\" (UniqueName: \"kubernetes.io/projected/3400d609-071b-4613-bb2f-cfc75f272968-kube-api-access-mhpsr\") pod \"3400d609-071b-4613-bb2f-cfc75f272968\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.150648 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-utilities\") pod \"3400d609-071b-4613-bb2f-cfc75f272968\" (UID: \"3400d609-071b-4613-bb2f-cfc75f272968\") " Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.152084 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-utilities" (OuterVolumeSpecName: "utilities") pod "3400d609-071b-4613-bb2f-cfc75f272968" (UID: "3400d609-071b-4613-bb2f-cfc75f272968"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.163876 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3400d609-071b-4613-bb2f-cfc75f272968-kube-api-access-mhpsr" (OuterVolumeSpecName: "kube-api-access-mhpsr") pod "3400d609-071b-4613-bb2f-cfc75f272968" (UID: "3400d609-071b-4613-bb2f-cfc75f272968"). InnerVolumeSpecName "kube-api-access-mhpsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.245553 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3400d609-071b-4613-bb2f-cfc75f272968" (UID: "3400d609-071b-4613-bb2f-cfc75f272968"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.253200 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.253240 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhpsr\" (UniqueName: \"kubernetes.io/projected/3400d609-071b-4613-bb2f-cfc75f272968-kube-api-access-mhpsr\") on node \"crc\" DevicePath \"\"" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.253250 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3400d609-071b-4613-bb2f-cfc75f272968-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.544805 4757 generic.go:334] "Generic (PLEG): container finished" podID="3400d609-071b-4613-bb2f-cfc75f272968" containerID="78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057" exitCode=0 Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.544859 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r86lf" event={"ID":"3400d609-071b-4613-bb2f-cfc75f272968","Type":"ContainerDied","Data":"78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057"} Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.544955 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r86lf" event={"ID":"3400d609-071b-4613-bb2f-cfc75f272968","Type":"ContainerDied","Data":"3f1d0502d2ac07afe9b307fba70971da578957a06444bfaace3ef80eb2b4fe6d"} Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.545000 4757 scope.go:117] "RemoveContainer" containerID="78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.544897 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r86lf" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.578857 4757 scope.go:117] "RemoveContainer" containerID="c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.585321 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r86lf"] Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.592515 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r86lf"] Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.602986 4757 scope.go:117] "RemoveContainer" containerID="4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.647445 4757 scope.go:117] "RemoveContainer" containerID="78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057" Oct 06 14:43:29 crc kubenswrapper[4757]: E1006 14:43:29.647835 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057\": container with ID starting with 78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057 not found: ID does not exist" containerID="78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.647865 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057"} err="failed to get container status \"78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057\": rpc error: code = NotFound desc = could not find container \"78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057\": container with ID starting with 78f2f834b755c9f5355824341d98ccc75c2a9ce6c60a6a9fb9d2492192361057 not found: ID does not exist" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.647887 4757 scope.go:117] "RemoveContainer" containerID="c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430" Oct 06 14:43:29 crc kubenswrapper[4757]: E1006 14:43:29.648276 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430\": container with ID starting with c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430 not found: ID does not exist" containerID="c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.648340 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430"} err="failed to get container status \"c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430\": rpc error: code = NotFound desc = could not find container \"c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430\": container with ID starting with c0206acef3a0debe46bb35deb04eb1058d9ed66d24663f2cebcb0b4791ab4430 not found: ID does not exist" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.648376 4757 scope.go:117] "RemoveContainer" containerID="4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd" Oct 06 14:43:29 crc kubenswrapper[4757]: E1006 14:43:29.648644 4757 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd\": container with ID starting with 4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd not found: ID does not exist" containerID="4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd" Oct 06 14:43:29 crc kubenswrapper[4757]: I1006 14:43:29.648671 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd"} err="failed to get container status \"4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd\": rpc error: code = NotFound desc = could not find container \"4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd\": container with ID starting with 4713b7d59a20bab0d8a0f258ae66c3d40e5dd2f8cb271fee93e806fa512f28bd not found: ID does not exist" Oct 06 14:43:30 crc kubenswrapper[4757]: I1006 14:43:30.180647 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:43:30 crc kubenswrapper[4757]: E1006 14:43:30.180931 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:43:30 crc kubenswrapper[4757]: I1006 14:43:30.202436 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3400d609-071b-4613-bb2f-cfc75f272968" path="/var/lib/kubelet/pods/3400d609-071b-4613-bb2f-cfc75f272968/volumes" Oct 06 14:43:44 crc kubenswrapper[4757]: I1006 14:43:44.180600 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:43:44 crc kubenswrapper[4757]: I1006 14:43:44.700795 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"6f14f416bc15863a33384b8c8ea53140fc06f49cce9443dbe94b545c0834ba75"} Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.193451 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bv967"] Oct 06 14:44:22 crc kubenswrapper[4757]: E1006 14:44:22.194402 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3400d609-071b-4613-bb2f-cfc75f272968" containerName="extract-utilities" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.194419 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3400d609-071b-4613-bb2f-cfc75f272968" containerName="extract-utilities" Oct 06 14:44:22 crc kubenswrapper[4757]: E1006 14:44:22.194439 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3400d609-071b-4613-bb2f-cfc75f272968" containerName="registry-server" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.194447 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3400d609-071b-4613-bb2f-cfc75f272968" containerName="registry-server" Oct 06 14:44:22 crc kubenswrapper[4757]: E1006 14:44:22.194468 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3400d609-071b-4613-bb2f-cfc75f272968" containerName="extract-content" Oct 06 
14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.194476 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3400d609-071b-4613-bb2f-cfc75f272968" containerName="extract-content" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.194674 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3400d609-071b-4613-bb2f-cfc75f272968" containerName="registry-server" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.195935 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.200637 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bv967"] Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.358130 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-utilities\") pod \"community-operators-bv967\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.358462 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7nsc\" (UniqueName: \"kubernetes.io/projected/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-kube-api-access-k7nsc\") pod \"community-operators-bv967\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.358566 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-catalog-content\") pod \"community-operators-bv967\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.460289 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-utilities\") pod \"community-operators-bv967\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.460701 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7nsc\" (UniqueName: \"kubernetes.io/projected/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-kube-api-access-k7nsc\") pod \"community-operators-bv967\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.460747 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-catalog-content\") pod \"community-operators-bv967\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.460826 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-utilities\") pod \"community-operators-bv967\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " 
pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.461291 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-catalog-content\") pod \"community-operators-bv967\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.485835 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7nsc\" (UniqueName: \"kubernetes.io/projected/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-kube-api-access-k7nsc\") pod \"community-operators-bv967\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:22 crc kubenswrapper[4757]: I1006 14:44:22.525068 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:23 crc kubenswrapper[4757]: I1006 14:44:23.079427 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bv967"] Oct 06 14:44:23 crc kubenswrapper[4757]: W1006 14:44:23.086306 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b1e6db4_5c7b_49bc_ae8c_47c5500a0e37.slice/crio-e5cba6f741ee47aca2e9644ba9584e40f19aef85ddd161a502f6fe82a0f58c9c WatchSource:0}: Error finding container e5cba6f741ee47aca2e9644ba9584e40f19aef85ddd161a502f6fe82a0f58c9c: Status 404 returned error can't find the container with id e5cba6f741ee47aca2e9644ba9584e40f19aef85ddd161a502f6fe82a0f58c9c Oct 06 14:44:24 crc kubenswrapper[4757]: I1006 14:44:24.046400 4757 generic.go:334] "Generic (PLEG): container finished" podID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerID="3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3" exitCode=0 Oct 06 14:44:24 crc kubenswrapper[4757]: I1006 14:44:24.046465 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bv967" event={"ID":"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37","Type":"ContainerDied","Data":"3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3"} Oct 06 14:44:24 crc kubenswrapper[4757]: I1006 14:44:24.046723 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bv967" event={"ID":"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37","Type":"ContainerStarted","Data":"e5cba6f741ee47aca2e9644ba9584e40f19aef85ddd161a502f6fe82a0f58c9c"} Oct 06 14:44:25 crc kubenswrapper[4757]: I1006 14:44:25.060248 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bv967" event={"ID":"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37","Type":"ContainerStarted","Data":"3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33"} Oct 06 14:44:26 crc kubenswrapper[4757]: I1006 14:44:26.075415 4757 generic.go:334] "Generic (PLEG): container finished" podID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerID="3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33" exitCode=0 Oct 06 14:44:26 crc kubenswrapper[4757]: I1006 14:44:26.075466 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bv967" event={"ID":"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37","Type":"ContainerDied","Data":"3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33"} Oct 
06 14:44:27 crc kubenswrapper[4757]: I1006 14:44:27.084817 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bv967" event={"ID":"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37","Type":"ContainerStarted","Data":"8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31"} Oct 06 14:44:27 crc kubenswrapper[4757]: I1006 14:44:27.113263 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bv967" podStartSLOduration=2.651732346 podStartE2EDuration="5.11324206s" podCreationTimestamp="2025-10-06 14:44:22 +0000 UTC" firstStartedPulling="2025-10-06 14:44:24.050122607 +0000 UTC m=+3952.547441164" lastFinishedPulling="2025-10-06 14:44:26.511632311 +0000 UTC m=+3955.008950878" observedRunningTime="2025-10-06 14:44:27.107882538 +0000 UTC m=+3955.605201085" watchObservedRunningTime="2025-10-06 14:44:27.11324206 +0000 UTC m=+3955.610560607" Oct 06 14:44:32 crc kubenswrapper[4757]: I1006 14:44:32.525780 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:32 crc kubenswrapper[4757]: I1006 14:44:32.526259 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:32 crc kubenswrapper[4757]: I1006 14:44:32.590152 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:33 crc kubenswrapper[4757]: I1006 14:44:33.193902 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:33 crc kubenswrapper[4757]: I1006 14:44:33.256310 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bv967"] Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.152045 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bv967" podUID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerName="registry-server" containerID="cri-o://8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31" gracePeriod=2 Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.548028 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.669977 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7nsc\" (UniqueName: \"kubernetes.io/projected/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-kube-api-access-k7nsc\") pod \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.670031 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-utilities\") pod \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.670206 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-catalog-content\") pod \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\" (UID: \"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37\") " Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.671040 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-utilities" (OuterVolumeSpecName: "utilities") pod "3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" (UID: "3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.677194 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-kube-api-access-k7nsc" (OuterVolumeSpecName: "kube-api-access-k7nsc") pod "3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" (UID: "3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37"). InnerVolumeSpecName "kube-api-access-k7nsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.723231 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" (UID: "3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.771727 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.771765 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7nsc\" (UniqueName: \"kubernetes.io/projected/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-kube-api-access-k7nsc\") on node \"crc\" DevicePath \"\"" Oct 06 14:44:35 crc kubenswrapper[4757]: I1006 14:44:35.771780 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.160527 4757 generic.go:334] "Generic (PLEG): container finished" podID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerID="8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31" exitCode=0 Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.160705 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bv967" event={"ID":"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37","Type":"ContainerDied","Data":"8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31"} Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.161481 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bv967" event={"ID":"3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37","Type":"ContainerDied","Data":"e5cba6f741ee47aca2e9644ba9584e40f19aef85ddd161a502f6fe82a0f58c9c"} Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.161593 4757 scope.go:117] "RemoveContainer" containerID="8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.160784 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bv967" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.185330 4757 scope.go:117] "RemoveContainer" containerID="3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.204722 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bv967"] Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.207694 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bv967"] Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.210676 4757 scope.go:117] "RemoveContainer" containerID="3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.247685 4757 scope.go:117] "RemoveContainer" containerID="8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31" Oct 06 14:44:36 crc kubenswrapper[4757]: E1006 14:44:36.248111 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31\": container with ID starting with 8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31 not found: ID does not exist" containerID="8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.248144 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31"} err="failed to get container status \"8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31\": rpc error: code = NotFound desc = could not find container \"8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31\": container with ID starting with 8b252fd72df0d1e7ac64e2f5cc4ba19a6049b8a8cdb642f4430c8be00164de31 not found: ID does not exist" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.248163 4757 scope.go:117] "RemoveContainer" containerID="3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33" Oct 06 14:44:36 crc kubenswrapper[4757]: E1006 14:44:36.248506 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33\": container with ID starting with 3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33 not found: ID does not exist" containerID="3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.248632 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33"} err="failed to get container status \"3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33\": rpc error: code = NotFound desc = could not find container \"3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33\": container with ID starting with 3817c49c2c9e3ee2511f1b375d75362ef28ccaf7a25b150187a10703bf102f33 not found: ID does not exist" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.248882 4757 scope.go:117] "RemoveContainer" containerID="3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3" Oct 06 14:44:36 crc kubenswrapper[4757]: E1006 14:44:36.249368 4757 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3\": container with ID starting with 3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3 not found: ID does not exist" containerID="3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3" Oct 06 14:44:36 crc kubenswrapper[4757]: I1006 14:44:36.249391 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3"} err="failed to get container status \"3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3\": rpc error: code = NotFound desc = could not find container \"3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3\": container with ID starting with 3511abb0cf9fe6bd45ccc99ae585246b2a7f89a0a2596e5571565113943ac3b3 not found: ID does not exist" Oct 06 14:44:38 crc kubenswrapper[4757]: I1006 14:44:38.197074 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" path="/var/lib/kubelet/pods/3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37/volumes" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.156927 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44"] Oct 06 14:45:00 crc kubenswrapper[4757]: E1006 14:45:00.157970 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerName="registry-server" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.157987 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerName="registry-server" Oct 06 14:45:00 crc kubenswrapper[4757]: E1006 14:45:00.158011 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerName="extract-utilities" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.158019 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerName="extract-utilities" Oct 06 14:45:00 crc kubenswrapper[4757]: E1006 14:45:00.158040 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerName="extract-content" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.158050 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerName="extract-content" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.158262 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b1e6db4-5c7b-49bc-ae8c-47c5500a0e37" containerName="registry-server" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.158865 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.164760 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44"] Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.196539 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.196595 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.258557 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-config-volume\") pod \"collect-profiles-29329365-gtn44\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.258626 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhd9k\" (UniqueName: \"kubernetes.io/projected/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-kube-api-access-dhd9k\") pod \"collect-profiles-29329365-gtn44\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.258706 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-secret-volume\") pod \"collect-profiles-29329365-gtn44\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.359771 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-secret-volume\") pod \"collect-profiles-29329365-gtn44\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.359830 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-config-volume\") pod \"collect-profiles-29329365-gtn44\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.359886 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhd9k\" (UniqueName: \"kubernetes.io/projected/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-kube-api-access-dhd9k\") pod \"collect-profiles-29329365-gtn44\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.361532 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-config-volume\") pod 
\"collect-profiles-29329365-gtn44\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.371488 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-secret-volume\") pod \"collect-profiles-29329365-gtn44\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.389570 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhd9k\" (UniqueName: \"kubernetes.io/projected/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-kube-api-access-dhd9k\") pod \"collect-profiles-29329365-gtn44\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.523733 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:00 crc kubenswrapper[4757]: I1006 14:45:00.975367 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44"] Oct 06 14:45:01 crc kubenswrapper[4757]: I1006 14:45:01.372007 4757 generic.go:334] "Generic (PLEG): container finished" podID="ff2fd123-57fa-42f6-aef8-4f92bd8915ed" containerID="776e514f9f0d7c98a8afa53215b8000769f28a3f5fb6273a2539b8ae1706cf1a" exitCode=0 Oct 06 14:45:01 crc kubenswrapper[4757]: I1006 14:45:01.372055 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" event={"ID":"ff2fd123-57fa-42f6-aef8-4f92bd8915ed","Type":"ContainerDied","Data":"776e514f9f0d7c98a8afa53215b8000769f28a3f5fb6273a2539b8ae1706cf1a"} Oct 06 14:45:01 crc kubenswrapper[4757]: I1006 14:45:01.373319 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" event={"ID":"ff2fd123-57fa-42f6-aef8-4f92bd8915ed","Type":"ContainerStarted","Data":"34538c843bfef53711ab73eb0ec94c6b0ea3f964649e128060b083487eaa9e49"} Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.663273 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.798539 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhd9k\" (UniqueName: \"kubernetes.io/projected/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-kube-api-access-dhd9k\") pod \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.798623 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-secret-volume\") pod \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.798696 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-config-volume\") pod \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\" (UID: \"ff2fd123-57fa-42f6-aef8-4f92bd8915ed\") " Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.799666 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-config-volume" (OuterVolumeSpecName: "config-volume") pod "ff2fd123-57fa-42f6-aef8-4f92bd8915ed" (UID: "ff2fd123-57fa-42f6-aef8-4f92bd8915ed"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.806936 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ff2fd123-57fa-42f6-aef8-4f92bd8915ed" (UID: "ff2fd123-57fa-42f6-aef8-4f92bd8915ed"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.808551 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-kube-api-access-dhd9k" (OuterVolumeSpecName: "kube-api-access-dhd9k") pod "ff2fd123-57fa-42f6-aef8-4f92bd8915ed" (UID: "ff2fd123-57fa-42f6-aef8-4f92bd8915ed"). InnerVolumeSpecName "kube-api-access-dhd9k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.900389 4757 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.900429 4757 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-config-volume\") on node \"crc\" DevicePath \"\"" Oct 06 14:45:02 crc kubenswrapper[4757]: I1006 14:45:02.900442 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhd9k\" (UniqueName: \"kubernetes.io/projected/ff2fd123-57fa-42f6-aef8-4f92bd8915ed-kube-api-access-dhd9k\") on node \"crc\" DevicePath \"\"" Oct 06 14:45:03 crc kubenswrapper[4757]: I1006 14:45:03.394402 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" event={"ID":"ff2fd123-57fa-42f6-aef8-4f92bd8915ed","Type":"ContainerDied","Data":"34538c843bfef53711ab73eb0ec94c6b0ea3f964649e128060b083487eaa9e49"} Oct 06 14:45:03 crc kubenswrapper[4757]: I1006 14:45:03.394445 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34538c843bfef53711ab73eb0ec94c6b0ea3f964649e128060b083487eaa9e49" Oct 06 14:45:03 crc kubenswrapper[4757]: I1006 14:45:03.394504 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329365-gtn44" Oct 06 14:45:03 crc kubenswrapper[4757]: I1006 14:45:03.736455 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b"] Oct 06 14:45:03 crc kubenswrapper[4757]: I1006 14:45:03.741878 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329320-7c82b"] Oct 06 14:45:04 crc kubenswrapper[4757]: I1006 14:45:04.193911 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aff831bf-b060-42c3-a4af-013f232517fc" path="/var/lib/kubelet/pods/aff831bf-b060-42c3-a4af-013f232517fc/volumes" Oct 06 14:45:36 crc kubenswrapper[4757]: I1006 14:45:36.896344 4757 scope.go:117] "RemoveContainer" containerID="5ce20a78c90863c4b79ad3c82e2e84e797731dd140f3ca3a25cb4e714ff4b4e1" Oct 06 14:46:04 crc kubenswrapper[4757]: I1006 14:46:04.361126 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:46:04 crc kubenswrapper[4757]: I1006 14:46:04.361689 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:46:34 crc kubenswrapper[4757]: I1006 14:46:34.360821 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Oct 06 14:46:34 crc kubenswrapper[4757]: I1006 14:46:34.362294 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:47:04 crc kubenswrapper[4757]: I1006 14:47:04.361639 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:47:04 crc kubenswrapper[4757]: I1006 14:47:04.362402 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:47:04 crc kubenswrapper[4757]: I1006 14:47:04.362462 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:47:04 crc kubenswrapper[4757]: I1006 14:47:04.363332 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6f14f416bc15863a33384b8c8ea53140fc06f49cce9443dbe94b545c0834ba75"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:47:04 crc kubenswrapper[4757]: I1006 14:47:04.363408 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://6f14f416bc15863a33384b8c8ea53140fc06f49cce9443dbe94b545c0834ba75" gracePeriod=600 Oct 06 14:47:05 crc kubenswrapper[4757]: I1006 14:47:05.482934 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="6f14f416bc15863a33384b8c8ea53140fc06f49cce9443dbe94b545c0834ba75" exitCode=0 Oct 06 14:47:05 crc kubenswrapper[4757]: I1006 14:47:05.483036 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"6f14f416bc15863a33384b8c8ea53140fc06f49cce9443dbe94b545c0834ba75"} Oct 06 14:47:05 crc kubenswrapper[4757]: I1006 14:47:05.483859 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"} Oct 06 14:47:05 crc kubenswrapper[4757]: I1006 14:47:05.483911 4757 scope.go:117] "RemoveContainer" containerID="e4a3d926d7d9790c8357af35f6f26cc4d65322931308c6a5e6391517071fd065" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.493224 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tcrgm"] Oct 06 14:47:12 crc kubenswrapper[4757]: E1006 14:47:12.494115 4757 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="ff2fd123-57fa-42f6-aef8-4f92bd8915ed" containerName="collect-profiles" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.494150 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff2fd123-57fa-42f6-aef8-4f92bd8915ed" containerName="collect-profiles" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.494339 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff2fd123-57fa-42f6-aef8-4f92bd8915ed" containerName="collect-profiles" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.495368 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.507880 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tcrgm"] Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.561584 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc9cd\" (UniqueName: \"kubernetes.io/projected/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-kube-api-access-tc9cd\") pod \"certified-operators-tcrgm\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.561650 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-catalog-content\") pod \"certified-operators-tcrgm\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.561697 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-utilities\") pod \"certified-operators-tcrgm\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.663798 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-utilities\") pod \"certified-operators-tcrgm\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.663903 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc9cd\" (UniqueName: \"kubernetes.io/projected/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-kube-api-access-tc9cd\") pod \"certified-operators-tcrgm\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.663939 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-catalog-content\") pod \"certified-operators-tcrgm\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.664598 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-utilities\") pod \"certified-operators-tcrgm\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.664634 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-catalog-content\") pod \"certified-operators-tcrgm\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.695553 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc9cd\" (UniqueName: \"kubernetes.io/projected/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-kube-api-access-tc9cd\") pod \"certified-operators-tcrgm\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:12 crc kubenswrapper[4757]: I1006 14:47:12.815526 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:13 crc kubenswrapper[4757]: I1006 14:47:13.282004 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tcrgm"] Oct 06 14:47:13 crc kubenswrapper[4757]: I1006 14:47:13.573516 4757 generic.go:334] "Generic (PLEG): container finished" podID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerID="c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9" exitCode=0 Oct 06 14:47:13 crc kubenswrapper[4757]: I1006 14:47:13.573569 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcrgm" event={"ID":"53a099a3-2246-4e33-a3fe-2c0ff1e780f0","Type":"ContainerDied","Data":"c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9"} Oct 06 14:47:13 crc kubenswrapper[4757]: I1006 14:47:13.573855 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcrgm" event={"ID":"53a099a3-2246-4e33-a3fe-2c0ff1e780f0","Type":"ContainerStarted","Data":"f8ec1538f22ec1b1face3dcd49b7e50cb2f4a614d6f7fe7db4b13b795a8c71fe"} Oct 06 14:47:13 crc kubenswrapper[4757]: I1006 14:47:13.576449 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 14:47:14 crc kubenswrapper[4757]: I1006 14:47:14.586343 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcrgm" event={"ID":"53a099a3-2246-4e33-a3fe-2c0ff1e780f0","Type":"ContainerStarted","Data":"a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34"} Oct 06 14:47:15 crc kubenswrapper[4757]: I1006 14:47:15.594078 4757 generic.go:334] "Generic (PLEG): container finished" podID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerID="a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34" exitCode=0 Oct 06 14:47:15 crc kubenswrapper[4757]: I1006 14:47:15.594171 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcrgm" event={"ID":"53a099a3-2246-4e33-a3fe-2c0ff1e780f0","Type":"ContainerDied","Data":"a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34"} Oct 06 14:47:16 crc kubenswrapper[4757]: I1006 14:47:16.612460 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcrgm" 
event={"ID":"53a099a3-2246-4e33-a3fe-2c0ff1e780f0","Type":"ContainerStarted","Data":"c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979"} Oct 06 14:47:22 crc kubenswrapper[4757]: I1006 14:47:22.816215 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:22 crc kubenswrapper[4757]: I1006 14:47:22.816893 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:22 crc kubenswrapper[4757]: I1006 14:47:22.863628 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:22 crc kubenswrapper[4757]: I1006 14:47:22.889211 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tcrgm" podStartSLOduration=8.45205937 podStartE2EDuration="10.889184511s" podCreationTimestamp="2025-10-06 14:47:12 +0000 UTC" firstStartedPulling="2025-10-06 14:47:13.576043743 +0000 UTC m=+4122.073362310" lastFinishedPulling="2025-10-06 14:47:16.013168874 +0000 UTC m=+4124.510487451" observedRunningTime="2025-10-06 14:47:16.6379594 +0000 UTC m=+4125.135277977" watchObservedRunningTime="2025-10-06 14:47:22.889184511 +0000 UTC m=+4131.386503088" Oct 06 14:47:23 crc kubenswrapper[4757]: I1006 14:47:23.718526 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:23 crc kubenswrapper[4757]: I1006 14:47:23.778673 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tcrgm"] Oct 06 14:47:25 crc kubenswrapper[4757]: I1006 14:47:25.688369 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tcrgm" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerName="registry-server" containerID="cri-o://c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979" gracePeriod=2 Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.081835 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.168316 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-utilities\") pod \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.168359 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-catalog-content\") pod \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.168405 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tc9cd\" (UniqueName: \"kubernetes.io/projected/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-kube-api-access-tc9cd\") pod \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\" (UID: \"53a099a3-2246-4e33-a3fe-2c0ff1e780f0\") " Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.169284 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-utilities" (OuterVolumeSpecName: "utilities") pod "53a099a3-2246-4e33-a3fe-2c0ff1e780f0" (UID: "53a099a3-2246-4e33-a3fe-2c0ff1e780f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.174253 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-kube-api-access-tc9cd" (OuterVolumeSpecName: "kube-api-access-tc9cd") pod "53a099a3-2246-4e33-a3fe-2c0ff1e780f0" (UID: "53a099a3-2246-4e33-a3fe-2c0ff1e780f0"). InnerVolumeSpecName "kube-api-access-tc9cd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.223286 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53a099a3-2246-4e33-a3fe-2c0ff1e780f0" (UID: "53a099a3-2246-4e33-a3fe-2c0ff1e780f0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.270687 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.270756 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.270778 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tc9cd\" (UniqueName: \"kubernetes.io/projected/53a099a3-2246-4e33-a3fe-2c0ff1e780f0-kube-api-access-tc9cd\") on node \"crc\" DevicePath \"\"" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.700883 4757 generic.go:334] "Generic (PLEG): container finished" podID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerID="c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979" exitCode=0 Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.700931 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcrgm" event={"ID":"53a099a3-2246-4e33-a3fe-2c0ff1e780f0","Type":"ContainerDied","Data":"c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979"} Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.700986 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcrgm" event={"ID":"53a099a3-2246-4e33-a3fe-2c0ff1e780f0","Type":"ContainerDied","Data":"f8ec1538f22ec1b1face3dcd49b7e50cb2f4a614d6f7fe7db4b13b795a8c71fe"} Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.701005 4757 scope.go:117] "RemoveContainer" containerID="c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.701075 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tcrgm" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.724282 4757 scope.go:117] "RemoveContainer" containerID="a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.750449 4757 scope.go:117] "RemoveContainer" containerID="c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.756500 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tcrgm"] Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.761862 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tcrgm"] Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.784589 4757 scope.go:117] "RemoveContainer" containerID="c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979" Oct 06 14:47:26 crc kubenswrapper[4757]: E1006 14:47:26.785182 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979\": container with ID starting with c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979 not found: ID does not exist" containerID="c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.785225 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979"} err="failed to get container status \"c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979\": rpc error: code = NotFound desc = could not find container \"c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979\": container with ID starting with c2e0bd70a1a1bb062634e188a0de9971b5b6157f9ca085bd11def2ddb8a9e979 not found: ID does not exist" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.785260 4757 scope.go:117] "RemoveContainer" containerID="a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34" Oct 06 14:47:26 crc kubenswrapper[4757]: E1006 14:47:26.785713 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34\": container with ID starting with a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34 not found: ID does not exist" containerID="a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.785745 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34"} err="failed to get container status \"a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34\": rpc error: code = NotFound desc = could not find container \"a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34\": container with ID starting with a02397ae80f956a394c3307a496da2b0ab74dbced1ea1df7373eeb67ecb91c34 not found: ID does not exist" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.785772 4757 scope.go:117] "RemoveContainer" containerID="c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9" Oct 06 14:47:26 crc kubenswrapper[4757]: E1006 14:47:26.786072 4757 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9\": container with ID starting with c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9 not found: ID does not exist" containerID="c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9" Oct 06 14:47:26 crc kubenswrapper[4757]: I1006 14:47:26.786139 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9"} err="failed to get container status \"c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9\": rpc error: code = NotFound desc = could not find container \"c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9\": container with ID starting with c171f38c0a02963c316cfdd9cff667ca2c4ad7b50f765f40e67a78df41a9e9b9 not found: ID does not exist" Oct 06 14:47:28 crc kubenswrapper[4757]: I1006 14:47:28.198959 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" path="/var/lib/kubelet/pods/53a099a3-2246-4e33-a3fe-2c0ff1e780f0/volumes" Oct 06 14:49:04 crc kubenswrapper[4757]: I1006 14:49:04.361714 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:49:04 crc kubenswrapper[4757]: I1006 14:49:04.362409 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:49:34 crc kubenswrapper[4757]: I1006 14:49:34.361231 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:49:34 crc kubenswrapper[4757]: I1006 14:49:34.361907 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:50:04 crc kubenswrapper[4757]: I1006 14:50:04.361396 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:50:04 crc kubenswrapper[4757]: I1006 14:50:04.362078 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:50:04 crc kubenswrapper[4757]: I1006 14:50:04.362241 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:50:04 crc kubenswrapper[4757]: I1006 14:50:04.363853 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:50:04 crc kubenswrapper[4757]: I1006 14:50:04.363986 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" gracePeriod=600 Oct 06 14:50:04 crc kubenswrapper[4757]: E1006 14:50:04.497293 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:50:05 crc kubenswrapper[4757]: I1006 14:50:05.125088 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" exitCode=0 Oct 06 14:50:05 crc kubenswrapper[4757]: I1006 14:50:05.125204 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"} Oct 06 14:50:05 crc kubenswrapper[4757]: I1006 14:50:05.125316 4757 scope.go:117] "RemoveContainer" containerID="6f14f416bc15863a33384b8c8ea53140fc06f49cce9443dbe94b545c0834ba75" Oct 06 14:50:05 crc kubenswrapper[4757]: I1006 14:50:05.126350 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:50:05 crc kubenswrapper[4757]: E1006 14:50:05.126697 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:50:16 crc kubenswrapper[4757]: I1006 14:50:16.180376 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:50:16 crc kubenswrapper[4757]: E1006 14:50:16.181428 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:50:27 crc 
kubenswrapper[4757]: I1006 14:50:27.180677 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:50:27 crc kubenswrapper[4757]: E1006 14:50:27.181811 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:50:38 crc kubenswrapper[4757]: I1006 14:50:38.180234 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:50:38 crc kubenswrapper[4757]: E1006 14:50:38.181943 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:50:52 crc kubenswrapper[4757]: I1006 14:50:52.203397 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:50:52 crc kubenswrapper[4757]: E1006 14:50:52.204564 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:51:04 crc kubenswrapper[4757]: I1006 14:51:04.180205 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:51:04 crc kubenswrapper[4757]: E1006 14:51:04.180960 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:51:17 crc kubenswrapper[4757]: I1006 14:51:17.180301 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:51:17 crc kubenswrapper[4757]: E1006 14:51:17.181402 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:51:31 crc kubenswrapper[4757]: I1006 14:51:31.180578 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:51:31 crc 
kubenswrapper[4757]: E1006 14:51:31.181829 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:51:46 crc kubenswrapper[4757]: I1006 14:51:46.180435 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:51:46 crc kubenswrapper[4757]: E1006 14:51:46.181758 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:51:58 crc kubenswrapper[4757]: I1006 14:51:58.181228 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:51:58 crc kubenswrapper[4757]: E1006 14:51:58.182134 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:52:13 crc kubenswrapper[4757]: I1006 14:52:13.180373 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:52:13 crc kubenswrapper[4757]: E1006 14:52:13.181185 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:52:25 crc kubenswrapper[4757]: I1006 14:52:25.181744 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:52:25 crc kubenswrapper[4757]: E1006 14:52:25.185288 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.078961 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6hsjq"] Oct 06 14:52:35 crc kubenswrapper[4757]: E1006 14:52:35.081370 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerName="registry-server" Oct 06 14:52:35 crc 
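[Annotation] The "back-off 5m0s" in the repeated pod_workers.go entries above is the kubelet's restart back-off ceiling for a crash-looping container. A minimal Go sketch of how a doubling back-off reaches and then holds that cap; the 10s initial delay is an assumption for illustration, since only the 5m0s cap is visible in this log:

package main

import (
	"fmt"
	"time"
)

func main() {
	backoff := 10 * time.Second // assumed initial restart delay (not shown in the log)
	const maxBackoff = 5 * time.Minute // matches the "back-off 5m0s" reported above
	for restart := 1; restart <= 8; restart++ {
		fmt.Printf("restart %d: wait %v\n", restart, backoff)
		backoff *= 2
		if backoff > maxBackoff {
			backoff = maxBackoff // from here on, every retry reports the full 5m0s
		}
	}
}

Under those assumptions the delays run 10s, 20s, 40s, 1m20s, 2m40s, then pin at 5m0s, which is consistent with the roughly 11-15s gaps between the early retries above and the unchanged 5m0s message throughout.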
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.078961 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6hsjq"]
Oct 06 14:52:35 crc kubenswrapper[4757]: E1006 14:52:35.081370 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerName="registry-server"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.081479 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerName="registry-server"
Oct 06 14:52:35 crc kubenswrapper[4757]: E1006 14:52:35.081583 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerName="extract-utilities"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.081657 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerName="extract-utilities"
Oct 06 14:52:35 crc kubenswrapper[4757]: E1006 14:52:35.081750 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerName="extract-content"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.081825 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerName="extract-content"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.082061 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="53a099a3-2246-4e33-a3fe-2c0ff1e780f0" containerName="registry-server"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.083580 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.093484 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hsjq"]
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.181619 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-utilities\") pod \"redhat-marketplace-6hsjq\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.181663 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-catalog-content\") pod \"redhat-marketplace-6hsjq\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.181769 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjc54\" (UniqueName: \"kubernetes.io/projected/e178c569-200f-4d92-b90a-c3bdcf03027c-kube-api-access-kjc54\") pod \"redhat-marketplace-6hsjq\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.283563 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-utilities\") pod \"redhat-marketplace-6hsjq\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.283620 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-catalog-content\") pod \"redhat-marketplace-6hsjq\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.283721 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjc54\" (UniqueName: \"kubernetes.io/projected/e178c569-200f-4d92-b90a-c3bdcf03027c-kube-api-access-kjc54\") pod \"redhat-marketplace-6hsjq\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.284415 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-catalog-content\") pod \"redhat-marketplace-6hsjq\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.284427 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-utilities\") pod \"redhat-marketplace-6hsjq\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.304244 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjc54\" (UniqueName: \"kubernetes.io/projected/e178c569-200f-4d92-b90a-c3bdcf03027c-kube-api-access-kjc54\") pod \"redhat-marketplace-6hsjq\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.420018 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hsjq"
Oct 06 14:52:35 crc kubenswrapper[4757]: I1006 14:52:35.824636 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hsjq"]
Oct 06 14:52:36 crc kubenswrapper[4757]: I1006 14:52:36.180901 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:52:36 crc kubenswrapper[4757]: E1006 14:52:36.181461 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:52:36 crc kubenswrapper[4757]: I1006 14:52:36.450107 4757 generic.go:334] "Generic (PLEG): container finished" podID="e178c569-200f-4d92-b90a-c3bdcf03027c" containerID="ef083eaf7d4872a5896dd384be5ca31ca7431f9791528deaf0938913eb2c7769" exitCode=0
Oct 06 14:52:36 crc kubenswrapper[4757]: I1006 14:52:36.450145 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hsjq" event={"ID":"e178c569-200f-4d92-b90a-c3bdcf03027c","Type":"ContainerDied","Data":"ef083eaf7d4872a5896dd384be5ca31ca7431f9791528deaf0938913eb2c7769"}
Oct 06 14:52:36 crc kubenswrapper[4757]: I1006 14:52:36.450169 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hsjq" event={"ID":"e178c569-200f-4d92-b90a-c3bdcf03027c","Type":"ContainerStarted","Data":"f0e6c6b59f20793ca6c978fad28b776978c837ced1b730fdb4bf735054582309"}
Oct 06 14:52:36 crc kubenswrapper[4757]: I1006 14:52:36.452663 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 06 14:52:38 crc kubenswrapper[4757]: I1006 14:52:38.468225 4757 generic.go:334] "Generic (PLEG): container finished" podID="e178c569-200f-4d92-b90a-c3bdcf03027c" containerID="5d3babaa6865387961760cef094e2f285d85f1b7a128aea30ede84673b99baf2" exitCode=0
Oct 06 14:52:38 crc kubenswrapper[4757]: I1006 14:52:38.468295 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hsjq" event={"ID":"e178c569-200f-4d92-b90a-c3bdcf03027c","Type":"ContainerDied","Data":"5d3babaa6865387961760cef094e2f285d85f1b7a128aea30ede84673b99baf2"}
Oct 06 14:52:39 crc kubenswrapper[4757]: I1006 14:52:39.478525 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hsjq" event={"ID":"e178c569-200f-4d92-b90a-c3bdcf03027c","Type":"ContainerStarted","Data":"d906af81146f45395556ea91bbaab1cb01b3a9cf2bd428bc8e6820fab4fc3043"}
Oct 06 14:52:39 crc kubenswrapper[4757]: I1006 14:52:39.510937 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6hsjq" podStartSLOduration=2.070982653 podStartE2EDuration="4.510875196s" podCreationTimestamp="2025-10-06 14:52:35 +0000 UTC" firstStartedPulling="2025-10-06 14:52:36.452257725 +0000 UTC m=+4444.949576292" lastFinishedPulling="2025-10-06 14:52:38.892150298 +0000 UTC m=+4447.389468835" observedRunningTime="2025-10-06 14:52:39.504367103 +0000 UTC m=+4448.001685640" watchObservedRunningTime="2025-10-06 14:52:39.510875196 +0000 UTC m=+4448.008193773"
pod="openshift-marketplace/redhat-marketplace-6hsjq" event={"ID":"e178c569-200f-4d92-b90a-c3bdcf03027c","Type":"ContainerDied","Data":"d906af81146f45395556ea91bbaab1cb01b3a9cf2bd428bc8e6820fab4fc3043"} Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.681634 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hsjq" Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.790882 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjc54\" (UniqueName: \"kubernetes.io/projected/e178c569-200f-4d92-b90a-c3bdcf03027c-kube-api-access-kjc54\") pod \"e178c569-200f-4d92-b90a-c3bdcf03027c\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.790939 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-utilities\") pod \"e178c569-200f-4d92-b90a-c3bdcf03027c\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.791008 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-catalog-content\") pod \"e178c569-200f-4d92-b90a-c3bdcf03027c\" (UID: \"e178c569-200f-4d92-b90a-c3bdcf03027c\") " Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.792254 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-utilities" (OuterVolumeSpecName: "utilities") pod "e178c569-200f-4d92-b90a-c3bdcf03027c" (UID: "e178c569-200f-4d92-b90a-c3bdcf03027c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.795040 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e178c569-200f-4d92-b90a-c3bdcf03027c-kube-api-access-kjc54" (OuterVolumeSpecName: "kube-api-access-kjc54") pod "e178c569-200f-4d92-b90a-c3bdcf03027c" (UID: "e178c569-200f-4d92-b90a-c3bdcf03027c"). InnerVolumeSpecName "kube-api-access-kjc54". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.804773 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e178c569-200f-4d92-b90a-c3bdcf03027c" (UID: "e178c569-200f-4d92-b90a-c3bdcf03027c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.892036 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.892077 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e178c569-200f-4d92-b90a-c3bdcf03027c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:52:48 crc kubenswrapper[4757]: I1006 14:52:48.892110 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjc54\" (UniqueName: \"kubernetes.io/projected/e178c569-200f-4d92-b90a-c3bdcf03027c-kube-api-access-kjc54\") on node \"crc\" DevicePath \"\"" Oct 06 14:52:49 crc kubenswrapper[4757]: I1006 14:52:49.180803 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:52:49 crc kubenswrapper[4757]: E1006 14:52:49.181226 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:52:49 crc kubenswrapper[4757]: I1006 14:52:49.571771 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6hsjq" Oct 06 14:52:49 crc kubenswrapper[4757]: I1006 14:52:49.571772 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6hsjq" event={"ID":"e178c569-200f-4d92-b90a-c3bdcf03027c","Type":"ContainerDied","Data":"f0e6c6b59f20793ca6c978fad28b776978c837ced1b730fdb4bf735054582309"} Oct 06 14:52:49 crc kubenswrapper[4757]: I1006 14:52:49.571855 4757 scope.go:117] "RemoveContainer" containerID="d906af81146f45395556ea91bbaab1cb01b3a9cf2bd428bc8e6820fab4fc3043" Oct 06 14:52:49 crc kubenswrapper[4757]: I1006 14:52:49.610623 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hsjq"] Oct 06 14:52:49 crc kubenswrapper[4757]: I1006 14:52:49.611228 4757 scope.go:117] "RemoveContainer" containerID="5d3babaa6865387961760cef094e2f285d85f1b7a128aea30ede84673b99baf2" Oct 06 14:52:49 crc kubenswrapper[4757]: I1006 14:52:49.615405 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6hsjq"] Oct 06 14:52:49 crc kubenswrapper[4757]: I1006 14:52:49.638417 4757 scope.go:117] "RemoveContainer" containerID="ef083eaf7d4872a5896dd384be5ca31ca7431f9791528deaf0938913eb2c7769" Oct 06 14:52:50 crc kubenswrapper[4757]: I1006 14:52:50.197921 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e178c569-200f-4d92-b90a-c3bdcf03027c" path="/var/lib/kubelet/pods/e178c569-200f-4d92-b90a-c3bdcf03027c/volumes" Oct 06 14:53:03 crc kubenswrapper[4757]: I1006 14:53:03.179930 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:53:03 crc kubenswrapper[4757]: E1006 14:53:03.180858 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.008471 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4vg9s"] Oct 06 14:53:14 crc kubenswrapper[4757]: E1006 14:53:14.009445 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e178c569-200f-4d92-b90a-c3bdcf03027c" containerName="extract-content" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.009460 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e178c569-200f-4d92-b90a-c3bdcf03027c" containerName="extract-content" Oct 06 14:53:14 crc kubenswrapper[4757]: E1006 14:53:14.009479 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e178c569-200f-4d92-b90a-c3bdcf03027c" containerName="registry-server" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.009489 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e178c569-200f-4d92-b90a-c3bdcf03027c" containerName="registry-server" Oct 06 14:53:14 crc kubenswrapper[4757]: E1006 14:53:14.009501 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e178c569-200f-4d92-b90a-c3bdcf03027c" containerName="extract-utilities" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.009509 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e178c569-200f-4d92-b90a-c3bdcf03027c" containerName="extract-utilities" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.009697 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e178c569-200f-4d92-b90a-c3bdcf03027c" containerName="registry-server" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.010857 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.026084 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4vg9s"] Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.097773 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nchsk\" (UniqueName: \"kubernetes.io/projected/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-kube-api-access-nchsk\") pod \"redhat-operators-4vg9s\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.097826 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-utilities\") pod \"redhat-operators-4vg9s\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.097854 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-catalog-content\") pod \"redhat-operators-4vg9s\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.199189 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nchsk\" (UniqueName: \"kubernetes.io/projected/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-kube-api-access-nchsk\") pod \"redhat-operators-4vg9s\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.199279 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-utilities\") pod \"redhat-operators-4vg9s\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.199320 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-catalog-content\") pod \"redhat-operators-4vg9s\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.200177 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-utilities\") pod \"redhat-operators-4vg9s\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.200286 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-catalog-content\") pod \"redhat-operators-4vg9s\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.345117 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nchsk\" (UniqueName: \"kubernetes.io/projected/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-kube-api-access-nchsk\") pod \"redhat-operators-4vg9s\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.346529 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:14 crc kubenswrapper[4757]: I1006 14:53:14.934025 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4vg9s"] Oct 06 14:53:15 crc kubenswrapper[4757]: I1006 14:53:15.848918 4757 generic.go:334] "Generic (PLEG): container finished" podID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerID="3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc" exitCode=0 Oct 06 14:53:15 crc kubenswrapper[4757]: I1006 14:53:15.848998 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vg9s" event={"ID":"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22","Type":"ContainerDied","Data":"3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc"} Oct 06 14:53:15 crc kubenswrapper[4757]: I1006 14:53:15.849257 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vg9s" event={"ID":"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22","Type":"ContainerStarted","Data":"e82048484a52cf3e70666518bba55ca2c69b9bd18e873ae2bb26a5f797b6b50d"} Oct 06 14:53:17 crc kubenswrapper[4757]: I1006 14:53:17.180064 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:53:17 crc kubenswrapper[4757]: E1006 14:53:17.180307 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 14:53:17 crc kubenswrapper[4757]: I1006 14:53:17.865589 4757 generic.go:334] "Generic (PLEG): container finished" podID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerID="a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24" exitCode=0 Oct 06 14:53:17 crc kubenswrapper[4757]: I1006 14:53:17.865639 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vg9s" event={"ID":"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22","Type":"ContainerDied","Data":"a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24"} Oct 06 14:53:18 crc kubenswrapper[4757]: I1006 14:53:18.878349 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vg9s" event={"ID":"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22","Type":"ContainerStarted","Data":"5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac"} Oct 06 14:53:18 crc kubenswrapper[4757]: I1006 14:53:18.909308 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4vg9s" podStartSLOduration=3.262690472 podStartE2EDuration="5.909282812s" podCreationTimestamp="2025-10-06 14:53:13 +0000 UTC" firstStartedPulling="2025-10-06 14:53:15.851286932 +0000 UTC m=+4484.348605489" lastFinishedPulling="2025-10-06 14:53:18.497879282 +0000 UTC m=+4486.995197829" 
observedRunningTime="2025-10-06 14:53:18.897959904 +0000 UTC m=+4487.395278451" watchObservedRunningTime="2025-10-06 14:53:18.909282812 +0000 UTC m=+4487.406601369" Oct 06 14:53:24 crc kubenswrapper[4757]: I1006 14:53:24.346724 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:24 crc kubenswrapper[4757]: I1006 14:53:24.347370 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:24 crc kubenswrapper[4757]: I1006 14:53:24.830496 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:24 crc kubenswrapper[4757]: I1006 14:53:24.954970 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:25 crc kubenswrapper[4757]: I1006 14:53:25.068421 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4vg9s"] Oct 06 14:53:26 crc kubenswrapper[4757]: I1006 14:53:26.934704 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4vg9s" podUID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerName="registry-server" containerID="cri-o://5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac" gracePeriod=2 Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.305624 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.397938 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-utilities\") pod \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.398129 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-catalog-content\") pod \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.398177 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nchsk\" (UniqueName: \"kubernetes.io/projected/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-kube-api-access-nchsk\") pod \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\" (UID: \"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22\") " Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.399352 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-utilities" (OuterVolumeSpecName: "utilities") pod "5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" (UID: "5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.405256 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-kube-api-access-nchsk" (OuterVolumeSpecName: "kube-api-access-nchsk") pod "5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" (UID: "5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22"). InnerVolumeSpecName "kube-api-access-nchsk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.499997 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nchsk\" (UniqueName: \"kubernetes.io/projected/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-kube-api-access-nchsk\") on node \"crc\" DevicePath \"\"" Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.500029 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.945925 4757 generic.go:334] "Generic (PLEG): container finished" podID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerID="5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac" exitCode=0 Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.945986 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vg9s" event={"ID":"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22","Type":"ContainerDied","Data":"5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac"} Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.946050 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4vg9s" event={"ID":"5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22","Type":"ContainerDied","Data":"e82048484a52cf3e70666518bba55ca2c69b9bd18e873ae2bb26a5f797b6b50d"} Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.946073 4757 scope.go:117] "RemoveContainer" containerID="5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac" Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.948286 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4vg9s" Oct 06 14:53:27 crc kubenswrapper[4757]: I1006 14:53:27.974447 4757 scope.go:117] "RemoveContainer" containerID="a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24" Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.004212 4757 scope.go:117] "RemoveContainer" containerID="3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc" Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.030247 4757 scope.go:117] "RemoveContainer" containerID="5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac" Oct 06 14:53:28 crc kubenswrapper[4757]: E1006 14:53:28.032022 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac\": container with ID starting with 5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac not found: ID does not exist" containerID="5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac" Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.032080 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac"} err="failed to get container status \"5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac\": rpc error: code = NotFound desc = could not find container \"5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac\": container with ID starting with 5844c07e675e5e3cea9119961cabee9eacd7e9aa93827f1790df923b197616ac not found: ID does not exist" Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.032150 4757 scope.go:117] "RemoveContainer" containerID="a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24" Oct 06 14:53:28 crc kubenswrapper[4757]: E1006 14:53:28.032616 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24\": container with ID starting with a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24 not found: ID does not exist" containerID="a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24" Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.032652 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24"} err="failed to get container status \"a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24\": rpc error: code = NotFound desc = could not find container \"a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24\": container with ID starting with a7cc13290d5d19c8c6f418d0b77ca75509e27f0839757434a821fe8ad64f7e24 not found: ID does not exist" Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.032674 4757 scope.go:117] "RemoveContainer" containerID="3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc" Oct 06 14:53:28 crc kubenswrapper[4757]: E1006 14:53:28.032930 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc\": container with ID starting with 3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc not found: ID does not exist" containerID="3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc" 
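[Annotation] Both catalog pods so far are stopped via "Killing container with a grace period" with gracePeriod=2 (the liveness-triggered machine-config-daemon restart above used gracePeriod=600), and in each case PLEG then reports the container finished with exitCode=0 before the deadline. A minimal, POSIX-only Go sketch of that TERM-then-KILL pattern, using a local sleep process as a stand-in for the container's init process (illustration under stated assumptions, not kubelet or CRI-O source):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func main() {
	// Stand-in for the container's main process.
	cmd := exec.Command("sleep", "30")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	// Step 1: polite SIGTERM, as a runtime does when asked for a graceful stop.
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case <-done:
		// The path seen in this log: the process goes away inside the grace
		// period (the registry-server containers above exit with exitCode=0).
		fmt.Println("exited within the grace period")
	case <-time.After(2 * time.Second): // gracePeriod=2, as for the catalog pods
		// Step 2: hard SIGKILL once the grace period lapses.
		_ = cmd.Process.Kill()
		fmt.Println("grace period expired; sent SIGKILL")
	}
}

The subsequent "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" pairs above are the benign tail of the same teardown: the kubelet retries RemoveContainer for IDs the runtime has already deleted.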
Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.032967 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc"} err="failed to get container status \"3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc\": rpc error: code = NotFound desc = could not find container \"3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc\": container with ID starting with 3f1e48be69bb2fcf37f697862e910cc5f8d65889f49f067fb3946a2c89ec47bc not found: ID does not exist"
Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.179831 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:53:28 crc kubenswrapper[4757]: E1006 14:53:28.180253 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.521525 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" (UID: "5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.589579 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4vg9s"]
Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.601846 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4vg9s"]
Oct 06 14:53:28 crc kubenswrapper[4757]: I1006 14:53:28.616920 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 06 14:53:30 crc kubenswrapper[4757]: I1006 14:53:30.193793 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" path="/var/lib/kubelet/pods/5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22/volumes"
Oct 06 14:53:40 crc kubenswrapper[4757]: I1006 14:53:40.183363 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:53:40 crc kubenswrapper[4757]: E1006 14:53:40.183997 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:53:51 crc kubenswrapper[4757]: I1006 14:53:51.179550 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:53:51 crc kubenswrapper[4757]: E1006 14:53:51.180464 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:54:06 crc kubenswrapper[4757]: I1006 14:54:06.180580 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:54:06 crc kubenswrapper[4757]: E1006 14:54:06.181372 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:54:18 crc kubenswrapper[4757]: I1006 14:54:18.180305 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:54:18 crc kubenswrapper[4757]: E1006 14:54:18.181350 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:54:33 crc kubenswrapper[4757]: I1006 14:54:33.181308 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:54:33 crc kubenswrapper[4757]: E1006 14:54:33.182478 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:54:47 crc kubenswrapper[4757]: I1006 14:54:47.180604 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:54:47 crc kubenswrapper[4757]: E1006 14:54:47.181527 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:54:59 crc kubenswrapper[4757]: I1006 14:54:59.180158 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:54:59 crc kubenswrapper[4757]: E1006 14:54:59.181011 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 14:55:14 crc kubenswrapper[4757]: I1006 14:55:14.180827 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439"
Oct 06 14:55:14 crc kubenswrapper[4757]: I1006 14:55:14.862616 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"bedffa6bb0c8f2dc43b319e7042746c9f10fca403f3f6e4886f8e811128a83a1"}
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.300396 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b8tmj"]
Oct 06 14:55:50 crc kubenswrapper[4757]: E1006 14:55:50.301194 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerName="registry-server"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.301211 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerName="registry-server"
Oct 06 14:55:50 crc kubenswrapper[4757]: E1006 14:55:50.301222 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerName="extract-content"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.301243 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerName="extract-content"
Oct 06 14:55:50 crc kubenswrapper[4757]: E1006 14:55:50.301260 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerName="extract-utilities"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.301269 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerName="extract-utilities"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.301438 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fdcc59d-4cf7-46c1-af6d-4f3fcace4e22" containerName="registry-server"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.303025 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.325027 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b8tmj"]
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.462584 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-catalog-content\") pod \"community-operators-b8tmj\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") " pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.462658 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-utilities\") pod \"community-operators-b8tmj\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") " pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.463196 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjmqf\" (UniqueName: \"kubernetes.io/projected/d965bdfd-8782-4739-9e0d-1e490f60f35f-kube-api-access-fjmqf\") pod \"community-operators-b8tmj\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") " pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.564961 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-catalog-content\") pod \"community-operators-b8tmj\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") " pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.565032 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-utilities\") pod \"community-operators-b8tmj\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") " pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.565126 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjmqf\" (UniqueName: \"kubernetes.io/projected/d965bdfd-8782-4739-9e0d-1e490f60f35f-kube-api-access-fjmqf\") pod \"community-operators-b8tmj\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") " pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.565731 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-catalog-content\") pod \"community-operators-b8tmj\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") " pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.565811 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-utilities\") pod \"community-operators-b8tmj\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") " pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.587333 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjmqf\" (UniqueName: \"kubernetes.io/projected/d965bdfd-8782-4739-9e0d-1e490f60f35f-kube-api-access-fjmqf\") pod \"community-operators-b8tmj\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") " pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:50 crc kubenswrapper[4757]: I1006 14:55:50.628643 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:55:51 crc kubenswrapper[4757]: I1006 14:55:51.074314 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b8tmj"]
Oct 06 14:55:51 crc kubenswrapper[4757]: I1006 14:55:51.174676 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8tmj" event={"ID":"d965bdfd-8782-4739-9e0d-1e490f60f35f","Type":"ContainerStarted","Data":"7a649441b649f5880b79cc3da1570f3295f738a39071d05b1b419bfad4458f77"}
Oct 06 14:55:52 crc kubenswrapper[4757]: I1006 14:55:52.188969 4757 generic.go:334] "Generic (PLEG): container finished" podID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerID="1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff" exitCode=0
Oct 06 14:55:52 crc kubenswrapper[4757]: I1006 14:55:52.196693 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8tmj" event={"ID":"d965bdfd-8782-4739-9e0d-1e490f60f35f","Type":"ContainerDied","Data":"1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff"}
Oct 06 14:55:54 crc kubenswrapper[4757]: I1006 14:55:54.207991 4757 generic.go:334] "Generic (PLEG): container finished" podID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerID="92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff" exitCode=0
Oct 06 14:55:54 crc kubenswrapper[4757]: I1006 14:55:54.208121 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8tmj" event={"ID":"d965bdfd-8782-4739-9e0d-1e490f60f35f","Type":"ContainerDied","Data":"92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff"}
Oct 06 14:55:55 crc kubenswrapper[4757]: I1006 14:55:55.220852 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8tmj" event={"ID":"d965bdfd-8782-4739-9e0d-1e490f60f35f","Type":"ContainerStarted","Data":"8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01"}
Oct 06 14:55:55 crc kubenswrapper[4757]: I1006 14:55:55.258571 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b8tmj" podStartSLOduration=2.571844462 podStartE2EDuration="5.258549472s" podCreationTimestamp="2025-10-06 14:55:50 +0000 UTC" firstStartedPulling="2025-10-06 14:55:52.192162681 +0000 UTC m=+4640.689481228" lastFinishedPulling="2025-10-06 14:55:54.878867701 +0000 UTC m=+4643.376186238" observedRunningTime="2025-10-06 14:55:55.250397838 +0000 UTC m=+4643.747716445" watchObservedRunningTime="2025-10-06 14:55:55.258549472 +0000 UTC m=+4643.755868029"
Oct 06 14:56:00 crc kubenswrapper[4757]: I1006 14:56:00.629171 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:56:00 crc kubenswrapper[4757]: I1006 14:56:00.629888 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:56:00 crc kubenswrapper[4757]: I1006 14:56:00.682152 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:56:01 crc kubenswrapper[4757]: I1006 14:56:01.342877 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:56:01 crc kubenswrapper[4757]: I1006 14:56:01.394163 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b8tmj"]
Oct 06 14:56:03 crc kubenswrapper[4757]: I1006 14:56:03.299163 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b8tmj" podUID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerName="registry-server" containerID="cri-o://8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01" gracePeriod=2
Oct 06 14:56:03 crc kubenswrapper[4757]: I1006 14:56:03.775215 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b8tmj"
Oct 06 14:56:03 crc kubenswrapper[4757]: I1006 14:56:03.961958 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-utilities\") pod \"d965bdfd-8782-4739-9e0d-1e490f60f35f\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") "
Oct 06 14:56:03 crc kubenswrapper[4757]: I1006 14:56:03.962077 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjmqf\" (UniqueName: \"kubernetes.io/projected/d965bdfd-8782-4739-9e0d-1e490f60f35f-kube-api-access-fjmqf\") pod \"d965bdfd-8782-4739-9e0d-1e490f60f35f\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") "
Oct 06 14:56:03 crc kubenswrapper[4757]: I1006 14:56:03.962154 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-catalog-content\") pod \"d965bdfd-8782-4739-9e0d-1e490f60f35f\" (UID: \"d965bdfd-8782-4739-9e0d-1e490f60f35f\") "
Oct 06 14:56:03 crc kubenswrapper[4757]: I1006 14:56:03.963563 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-utilities" (OuterVolumeSpecName: "utilities") pod "d965bdfd-8782-4739-9e0d-1e490f60f35f" (UID: "d965bdfd-8782-4739-9e0d-1e490f60f35f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:56:03 crc kubenswrapper[4757]: I1006 14:56:03.968313 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d965bdfd-8782-4739-9e0d-1e490f60f35f-kube-api-access-fjmqf" (OuterVolumeSpecName: "kube-api-access-fjmqf") pod "d965bdfd-8782-4739-9e0d-1e490f60f35f" (UID: "d965bdfd-8782-4739-9e0d-1e490f60f35f"). InnerVolumeSpecName "kube-api-access-fjmqf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.064246 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjmqf\" (UniqueName: \"kubernetes.io/projected/d965bdfd-8782-4739-9e0d-1e490f60f35f-kube-api-access-fjmqf\") on node \"crc\" DevicePath \"\""
Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.064293 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-utilities\") on node \"crc\" DevicePath \"\""
Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.126380 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d965bdfd-8782-4739-9e0d-1e490f60f35f" (UID: "d965bdfd-8782-4739-9e0d-1e490f60f35f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.166481 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d965bdfd-8782-4739-9e0d-1e490f60f35f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.311727 4757 generic.go:334] "Generic (PLEG): container finished" podID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerID="8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01" exitCode=0
Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.311789 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8tmj" event={"ID":"d965bdfd-8782-4739-9e0d-1e490f60f35f","Type":"ContainerDied","Data":"8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01"}
Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.311806 4757 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/community-operators-b8tmj" Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.311841 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b8tmj" event={"ID":"d965bdfd-8782-4739-9e0d-1e490f60f35f","Type":"ContainerDied","Data":"7a649441b649f5880b79cc3da1570f3295f738a39071d05b1b419bfad4458f77"} Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.311876 4757 scope.go:117] "RemoveContainer" containerID="8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01" Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.348501 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b8tmj"] Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.365593 4757 scope.go:117] "RemoveContainer" containerID="92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff" Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.368704 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b8tmj"] Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.388218 4757 scope.go:117] "RemoveContainer" containerID="1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff" Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.421108 4757 scope.go:117] "RemoveContainer" containerID="8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01" Oct 06 14:56:04 crc kubenswrapper[4757]: E1006 14:56:04.421679 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01\": container with ID starting with 8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01 not found: ID does not exist" containerID="8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01" Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.421757 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01"} err="failed to get container status \"8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01\": rpc error: code = NotFound desc = could not find container \"8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01\": container with ID starting with 8aea045d213b1f4927eac60f176573f62d6994f8f93fe9014c06886d9fb8bf01 not found: ID does not exist" Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.421824 4757 scope.go:117] "RemoveContainer" containerID="92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff" Oct 06 14:56:04 crc kubenswrapper[4757]: E1006 14:56:04.422285 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff\": container with ID starting with 92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff not found: ID does not exist" containerID="92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff" Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.422309 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff"} err="failed to get container status \"92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff\": rpc error: code = NotFound desc = could not find 
container \"92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff\": container with ID starting with 92687be23efa9ff39e493dbb553ad4e038a15e10ea4f85da40ef64de869692ff not found: ID does not exist" Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.422324 4757 scope.go:117] "RemoveContainer" containerID="1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff" Oct 06 14:56:04 crc kubenswrapper[4757]: E1006 14:56:04.422648 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff\": container with ID starting with 1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff not found: ID does not exist" containerID="1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff" Oct 06 14:56:04 crc kubenswrapper[4757]: I1006 14:56:04.422678 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff"} err="failed to get container status \"1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff\": rpc error: code = NotFound desc = could not find container \"1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff\": container with ID starting with 1a360489e7f92aa4f0f2198b0b1c1921f58cde77c3eaa3cbca610daf671500ff not found: ID does not exist" Oct 06 14:56:06 crc kubenswrapper[4757]: I1006 14:56:06.196238 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d965bdfd-8782-4739-9e0d-1e490f60f35f" path="/var/lib/kubelet/pods/d965bdfd-8782-4739-9e0d-1e490f60f35f/volumes" Oct 06 14:57:34 crc kubenswrapper[4757]: I1006 14:57:34.361281 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:57:34 crc kubenswrapper[4757]: I1006 14:57:34.362137 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.405982 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-kwk85"] Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.416162 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-kwk85"] Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.542463 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-zqbvn"] Oct 06 14:58:02 crc kubenswrapper[4757]: E1006 14:58:02.543063 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerName="extract-content" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.543140 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerName="extract-content" Oct 06 14:58:02 crc kubenswrapper[4757]: E1006 14:58:02.543164 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerName="registry-server" Oct 06 14:58:02 crc 
kubenswrapper[4757]: I1006 14:58:02.543181 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerName="registry-server" Oct 06 14:58:02 crc kubenswrapper[4757]: E1006 14:58:02.543257 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerName="extract-utilities" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.543280 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerName="extract-utilities" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.543629 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="d965bdfd-8782-4739-9e0d-1e490f60f35f" containerName="registry-server" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.544833 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.548476 4757 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-8h4vq" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.548793 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.549129 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.549439 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.554963 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zqbvn"] Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.689755 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14a398cf-df25-409e-b2e7-aba9174aa065-crc-storage\") pod \"crc-storage-crc-zqbvn\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.689832 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/14a398cf-df25-409e-b2e7-aba9174aa065-node-mnt\") pod \"crc-storage-crc-zqbvn\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.689919 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk94t\" (UniqueName: \"kubernetes.io/projected/14a398cf-df25-409e-b2e7-aba9174aa065-kube-api-access-qk94t\") pod \"crc-storage-crc-zqbvn\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.791693 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14a398cf-df25-409e-b2e7-aba9174aa065-crc-storage\") pod \"crc-storage-crc-zqbvn\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.791800 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: 
\"kubernetes.io/host-path/14a398cf-df25-409e-b2e7-aba9174aa065-node-mnt\") pod \"crc-storage-crc-zqbvn\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.791869 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk94t\" (UniqueName: \"kubernetes.io/projected/14a398cf-df25-409e-b2e7-aba9174aa065-kube-api-access-qk94t\") pod \"crc-storage-crc-zqbvn\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.792191 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/14a398cf-df25-409e-b2e7-aba9174aa065-node-mnt\") pod \"crc-storage-crc-zqbvn\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.792490 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14a398cf-df25-409e-b2e7-aba9174aa065-crc-storage\") pod \"crc-storage-crc-zqbvn\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.817562 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk94t\" (UniqueName: \"kubernetes.io/projected/14a398cf-df25-409e-b2e7-aba9174aa065-kube-api-access-qk94t\") pod \"crc-storage-crc-zqbvn\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:02 crc kubenswrapper[4757]: I1006 14:58:02.870063 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:03 crc kubenswrapper[4757]: I1006 14:58:03.112658 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zqbvn"] Oct 06 14:58:03 crc kubenswrapper[4757]: I1006 14:58:03.126158 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 14:58:03 crc kubenswrapper[4757]: I1006 14:58:03.318076 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zqbvn" event={"ID":"14a398cf-df25-409e-b2e7-aba9174aa065","Type":"ContainerStarted","Data":"579d747707e3d4df2b386d6dd35b4718c6e2667020543a80167221fb3c650495"} Oct 06 14:58:04 crc kubenswrapper[4757]: I1006 14:58:04.196780 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9d9e156-e9b3-454d-88d8-d5b2c9c54512" path="/var/lib/kubelet/pods/d9d9e156-e9b3-454d-88d8-d5b2c9c54512/volumes" Oct 06 14:58:04 crc kubenswrapper[4757]: I1006 14:58:04.333592 4757 generic.go:334] "Generic (PLEG): container finished" podID="14a398cf-df25-409e-b2e7-aba9174aa065" containerID="a53e77c23399ece65bfb873f4b595d36079c6f459b84c526a54bafe3c6a43ee2" exitCode=0 Oct 06 14:58:04 crc kubenswrapper[4757]: I1006 14:58:04.333630 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zqbvn" event={"ID":"14a398cf-df25-409e-b2e7-aba9174aa065","Type":"ContainerDied","Data":"a53e77c23399ece65bfb873f4b595d36079c6f459b84c526a54bafe3c6a43ee2"} Oct 06 14:58:04 crc kubenswrapper[4757]: I1006 14:58:04.361739 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:58:04 crc kubenswrapper[4757]: I1006 14:58:04.362533 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.631317 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.733709 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14a398cf-df25-409e-b2e7-aba9174aa065-crc-storage\") pod \"14a398cf-df25-409e-b2e7-aba9174aa065\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.733806 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/14a398cf-df25-409e-b2e7-aba9174aa065-node-mnt\") pod \"14a398cf-df25-409e-b2e7-aba9174aa065\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.733841 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qk94t\" (UniqueName: \"kubernetes.io/projected/14a398cf-df25-409e-b2e7-aba9174aa065-kube-api-access-qk94t\") pod \"14a398cf-df25-409e-b2e7-aba9174aa065\" (UID: \"14a398cf-df25-409e-b2e7-aba9174aa065\") " Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.734002 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/14a398cf-df25-409e-b2e7-aba9174aa065-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "14a398cf-df25-409e-b2e7-aba9174aa065" (UID: "14a398cf-df25-409e-b2e7-aba9174aa065"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.734446 4757 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/14a398cf-df25-409e-b2e7-aba9174aa065-node-mnt\") on node \"crc\" DevicePath \"\"" Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.739838 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a398cf-df25-409e-b2e7-aba9174aa065-kube-api-access-qk94t" (OuterVolumeSpecName: "kube-api-access-qk94t") pod "14a398cf-df25-409e-b2e7-aba9174aa065" (UID: "14a398cf-df25-409e-b2e7-aba9174aa065"). InnerVolumeSpecName "kube-api-access-qk94t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.776905 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14a398cf-df25-409e-b2e7-aba9174aa065-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "14a398cf-df25-409e-b2e7-aba9174aa065" (UID: "14a398cf-df25-409e-b2e7-aba9174aa065"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.836176 4757 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/14a398cf-df25-409e-b2e7-aba9174aa065-crc-storage\") on node \"crc\" DevicePath \"\"" Oct 06 14:58:05 crc kubenswrapper[4757]: I1006 14:58:05.836243 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qk94t\" (UniqueName: \"kubernetes.io/projected/14a398cf-df25-409e-b2e7-aba9174aa065-kube-api-access-qk94t\") on node \"crc\" DevicePath \"\"" Oct 06 14:58:06 crc kubenswrapper[4757]: I1006 14:58:06.353255 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zqbvn" event={"ID":"14a398cf-df25-409e-b2e7-aba9174aa065","Type":"ContainerDied","Data":"579d747707e3d4df2b386d6dd35b4718c6e2667020543a80167221fb3c650495"} Oct 06 14:58:06 crc kubenswrapper[4757]: I1006 14:58:06.353665 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="579d747707e3d4df2b386d6dd35b4718c6e2667020543a80167221fb3c650495" Oct 06 14:58:06 crc kubenswrapper[4757]: I1006 14:58:06.353306 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zqbvn" Oct 06 14:58:07 crc kubenswrapper[4757]: I1006 14:58:07.967212 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-zqbvn"] Oct 06 14:58:07 crc kubenswrapper[4757]: I1006 14:58:07.974329 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-zqbvn"] Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.136632 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-ff2bp"] Oct 06 14:58:08 crc kubenswrapper[4757]: E1006 14:58:08.137298 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a398cf-df25-409e-b2e7-aba9174aa065" containerName="storage" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.137329 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a398cf-df25-409e-b2e7-aba9174aa065" containerName="storage" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.137562 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a398cf-df25-409e-b2e7-aba9174aa065" containerName="storage" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.138392 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.142443 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-ff2bp"] Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.144142 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.144604 4757 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-8h4vq" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.144784 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.144951 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.201197 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a398cf-df25-409e-b2e7-aba9174aa065" path="/var/lib/kubelet/pods/14a398cf-df25-409e-b2e7-aba9174aa065/volumes" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.272560 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4khr\" (UniqueName: \"kubernetes.io/projected/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-kube-api-access-t4khr\") pod \"crc-storage-crc-ff2bp\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.272892 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-crc-storage\") pod \"crc-storage-crc-ff2bp\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.273056 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-node-mnt\") pod \"crc-storage-crc-ff2bp\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.374324 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-crc-storage\") pod \"crc-storage-crc-ff2bp\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.374421 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-node-mnt\") pod \"crc-storage-crc-ff2bp\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.374490 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4khr\" (UniqueName: \"kubernetes.io/projected/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-kube-api-access-t4khr\") pod \"crc-storage-crc-ff2bp\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.374812 4757 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-node-mnt\") pod \"crc-storage-crc-ff2bp\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.378381 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-crc-storage\") pod \"crc-storage-crc-ff2bp\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.403493 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4khr\" (UniqueName: \"kubernetes.io/projected/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-kube-api-access-t4khr\") pod \"crc-storage-crc-ff2bp\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.456935 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:08 crc kubenswrapper[4757]: I1006 14:58:08.886394 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-ff2bp"] Oct 06 14:58:09 crc kubenswrapper[4757]: I1006 14:58:09.377928 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ff2bp" event={"ID":"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5","Type":"ContainerStarted","Data":"2fe51ca4abc545d4638f554ac14072ad758f63a45dd577832f3ea539f876a07c"} Oct 06 14:58:10 crc kubenswrapper[4757]: I1006 14:58:10.394693 4757 generic.go:334] "Generic (PLEG): container finished" podID="03d6d8ea-ce41-4877-bb5a-4d4296f8bde5" containerID="76eeebce096963ef22d2415ff5fd19709625886fc41dfd4b2404d651a6fcee8d" exitCode=0 Oct 06 14:58:10 crc kubenswrapper[4757]: I1006 14:58:10.394793 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ff2bp" event={"ID":"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5","Type":"ContainerDied","Data":"76eeebce096963ef22d2415ff5fd19709625886fc41dfd4b2404d651a6fcee8d"} Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.688501 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.821744 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4khr\" (UniqueName: \"kubernetes.io/projected/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-kube-api-access-t4khr\") pod \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.821831 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-node-mnt\") pod \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.821881 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-crc-storage\") pod \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\" (UID: \"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5\") " Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.822261 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "03d6d8ea-ce41-4877-bb5a-4d4296f8bde5" (UID: "03d6d8ea-ce41-4877-bb5a-4d4296f8bde5"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.829379 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-kube-api-access-t4khr" (OuterVolumeSpecName: "kube-api-access-t4khr") pod "03d6d8ea-ce41-4877-bb5a-4d4296f8bde5" (UID: "03d6d8ea-ce41-4877-bb5a-4d4296f8bde5"). InnerVolumeSpecName "kube-api-access-t4khr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.840438 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "03d6d8ea-ce41-4877-bb5a-4d4296f8bde5" (UID: "03d6d8ea-ce41-4877-bb5a-4d4296f8bde5"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.924073 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4khr\" (UniqueName: \"kubernetes.io/projected/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-kube-api-access-t4khr\") on node \"crc\" DevicePath \"\"" Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.924127 4757 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-node-mnt\") on node \"crc\" DevicePath \"\"" Oct 06 14:58:11 crc kubenswrapper[4757]: I1006 14:58:11.924137 4757 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/03d6d8ea-ce41-4877-bb5a-4d4296f8bde5-crc-storage\") on node \"crc\" DevicePath \"\"" Oct 06 14:58:12 crc kubenswrapper[4757]: I1006 14:58:12.410161 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ff2bp" event={"ID":"03d6d8ea-ce41-4877-bb5a-4d4296f8bde5","Type":"ContainerDied","Data":"2fe51ca4abc545d4638f554ac14072ad758f63a45dd577832f3ea539f876a07c"} Oct 06 14:58:12 crc kubenswrapper[4757]: I1006 14:58:12.410200 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fe51ca4abc545d4638f554ac14072ad758f63a45dd577832f3ea539f876a07c" Oct 06 14:58:12 crc kubenswrapper[4757]: I1006 14:58:12.410222 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-ff2bp" Oct 06 14:58:34 crc kubenswrapper[4757]: I1006 14:58:34.360908 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 14:58:34 crc kubenswrapper[4757]: I1006 14:58:34.361711 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 14:58:34 crc kubenswrapper[4757]: I1006 14:58:34.361807 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 14:58:34 crc kubenswrapper[4757]: I1006 14:58:34.362735 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bedffa6bb0c8f2dc43b319e7042746c9f10fca403f3f6e4886f8e811128a83a1"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 14:58:34 crc kubenswrapper[4757]: I1006 14:58:34.362843 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://bedffa6bb0c8f2dc43b319e7042746c9f10fca403f3f6e4886f8e811128a83a1" gracePeriod=600 Oct 06 14:58:34 crc kubenswrapper[4757]: I1006 14:58:34.650392 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" 
containerID="bedffa6bb0c8f2dc43b319e7042746c9f10fca403f3f6e4886f8e811128a83a1" exitCode=0 Oct 06 14:58:34 crc kubenswrapper[4757]: I1006 14:58:34.650478 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"bedffa6bb0c8f2dc43b319e7042746c9f10fca403f3f6e4886f8e811128a83a1"} Oct 06 14:58:34 crc kubenswrapper[4757]: I1006 14:58:34.650964 4757 scope.go:117] "RemoveContainer" containerID="4a9d6ebdcaf823a4eb3fc2bef5a2f0b2046cf043ea1e0ad4d8bbe3e2e5542439" Oct 06 14:58:35 crc kubenswrapper[4757]: I1006 14:58:35.663916 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125"} Oct 06 14:58:37 crc kubenswrapper[4757]: I1006 14:58:37.198508 4757 scope.go:117] "RemoveContainer" containerID="1ede0fc18eea69d8072bbcd68d5c54e1392f3047086b2604a03f157eb5999fb6" Oct 06 14:58:41 crc kubenswrapper[4757]: I1006 14:58:41.971084 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qfqnj"] Oct 06 14:58:41 crc kubenswrapper[4757]: E1006 14:58:41.971882 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03d6d8ea-ce41-4877-bb5a-4d4296f8bde5" containerName="storage" Oct 06 14:58:41 crc kubenswrapper[4757]: I1006 14:58:41.971894 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="03d6d8ea-ce41-4877-bb5a-4d4296f8bde5" containerName="storage" Oct 06 14:58:41 crc kubenswrapper[4757]: I1006 14:58:41.972029 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="03d6d8ea-ce41-4877-bb5a-4d4296f8bde5" containerName="storage" Oct 06 14:58:41 crc kubenswrapper[4757]: I1006 14:58:41.972995 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:41 crc kubenswrapper[4757]: I1006 14:58:41.985042 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qfqnj"] Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.095122 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-utilities\") pod \"certified-operators-qfqnj\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.095245 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzlnk\" (UniqueName: \"kubernetes.io/projected/b3395743-c9ac-4836-afd5-1cbebeec41de-kube-api-access-zzlnk\") pod \"certified-operators-qfqnj\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.095372 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-catalog-content\") pod \"certified-operators-qfqnj\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.196380 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-utilities\") pod \"certified-operators-qfqnj\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.196457 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzlnk\" (UniqueName: \"kubernetes.io/projected/b3395743-c9ac-4836-afd5-1cbebeec41de-kube-api-access-zzlnk\") pod \"certified-operators-qfqnj\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.196498 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-catalog-content\") pod \"certified-operators-qfqnj\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.196895 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-utilities\") pod \"certified-operators-qfqnj\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.197013 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-catalog-content\") pod \"certified-operators-qfqnj\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.214747 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zzlnk\" (UniqueName: \"kubernetes.io/projected/b3395743-c9ac-4836-afd5-1cbebeec41de-kube-api-access-zzlnk\") pod \"certified-operators-qfqnj\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.292362 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:42 crc kubenswrapper[4757]: I1006 14:58:42.747642 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qfqnj"] Oct 06 14:58:43 crc kubenswrapper[4757]: I1006 14:58:43.730331 4757 generic.go:334] "Generic (PLEG): container finished" podID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerID="31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de" exitCode=0 Oct 06 14:58:43 crc kubenswrapper[4757]: I1006 14:58:43.730428 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qfqnj" event={"ID":"b3395743-c9ac-4836-afd5-1cbebeec41de","Type":"ContainerDied","Data":"31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de"} Oct 06 14:58:43 crc kubenswrapper[4757]: I1006 14:58:43.730694 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qfqnj" event={"ID":"b3395743-c9ac-4836-afd5-1cbebeec41de","Type":"ContainerStarted","Data":"a9e499c81c0b5ed92ad4cae619918f5e438685d982d7cf506b568f0db5463830"} Oct 06 14:58:45 crc kubenswrapper[4757]: I1006 14:58:45.745168 4757 generic.go:334] "Generic (PLEG): container finished" podID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerID="1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2" exitCode=0 Oct 06 14:58:45 crc kubenswrapper[4757]: I1006 14:58:45.745282 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qfqnj" event={"ID":"b3395743-c9ac-4836-afd5-1cbebeec41de","Type":"ContainerDied","Data":"1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2"} Oct 06 14:58:46 crc kubenswrapper[4757]: I1006 14:58:46.756158 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qfqnj" event={"ID":"b3395743-c9ac-4836-afd5-1cbebeec41de","Type":"ContainerStarted","Data":"e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3"} Oct 06 14:58:46 crc kubenswrapper[4757]: I1006 14:58:46.780198 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qfqnj" podStartSLOduration=3.151989495 podStartE2EDuration="5.780176983s" podCreationTimestamp="2025-10-06 14:58:41 +0000 UTC" firstStartedPulling="2025-10-06 14:58:43.733008183 +0000 UTC m=+4812.230326760" lastFinishedPulling="2025-10-06 14:58:46.361195711 +0000 UTC m=+4814.858514248" observedRunningTime="2025-10-06 14:58:46.774260771 +0000 UTC m=+4815.271579348" watchObservedRunningTime="2025-10-06 14:58:46.780176983 +0000 UTC m=+4815.277495530" Oct 06 14:58:52 crc kubenswrapper[4757]: I1006 14:58:52.293207 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:52 crc kubenswrapper[4757]: I1006 14:58:52.294238 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:52 crc kubenswrapper[4757]: I1006 14:58:52.370885 4757 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:52 crc kubenswrapper[4757]: I1006 14:58:52.841378 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:52 crc kubenswrapper[4757]: I1006 14:58:52.880392 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qfqnj"] Oct 06 14:58:54 crc kubenswrapper[4757]: I1006 14:58:54.820966 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qfqnj" podUID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerName="registry-server" containerID="cri-o://e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3" gracePeriod=2 Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.229752 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qfqnj" Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.280598 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-utilities\") pod \"b3395743-c9ac-4836-afd5-1cbebeec41de\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.280677 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzlnk\" (UniqueName: \"kubernetes.io/projected/b3395743-c9ac-4836-afd5-1cbebeec41de-kube-api-access-zzlnk\") pod \"b3395743-c9ac-4836-afd5-1cbebeec41de\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.280728 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-catalog-content\") pod \"b3395743-c9ac-4836-afd5-1cbebeec41de\" (UID: \"b3395743-c9ac-4836-afd5-1cbebeec41de\") " Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.282287 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-utilities" (OuterVolumeSpecName: "utilities") pod "b3395743-c9ac-4836-afd5-1cbebeec41de" (UID: "b3395743-c9ac-4836-afd5-1cbebeec41de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.287568 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3395743-c9ac-4836-afd5-1cbebeec41de-kube-api-access-zzlnk" (OuterVolumeSpecName: "kube-api-access-zzlnk") pod "b3395743-c9ac-4836-afd5-1cbebeec41de" (UID: "b3395743-c9ac-4836-afd5-1cbebeec41de"). InnerVolumeSpecName "kube-api-access-zzlnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.339888 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b3395743-c9ac-4836-afd5-1cbebeec41de" (UID: "b3395743-c9ac-4836-afd5-1cbebeec41de"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.382683 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.382728 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzlnk\" (UniqueName: \"kubernetes.io/projected/b3395743-c9ac-4836-afd5-1cbebeec41de-kube-api-access-zzlnk\") on node \"crc\" DevicePath \"\"" Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.382742 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b3395743-c9ac-4836-afd5-1cbebeec41de-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.853966 4757 generic.go:334] "Generic (PLEG): container finished" podID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerID="e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3" exitCode=0 Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.854044 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qfqnj" event={"ID":"b3395743-c9ac-4836-afd5-1cbebeec41de","Type":"ContainerDied","Data":"e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3"} Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.854146 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qfqnj" event={"ID":"b3395743-c9ac-4836-afd5-1cbebeec41de","Type":"ContainerDied","Data":"a9e499c81c0b5ed92ad4cae619918f5e438685d982d7cf506b568f0db5463830"} Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.854278 4757 scope.go:117] "RemoveContainer" containerID="e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3" Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.854668 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qfqnj"
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.895209 4757 scope.go:117] "RemoveContainer" containerID="1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2"
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.913040 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qfqnj"]
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.925558 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qfqnj"]
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.944985 4757 scope.go:117] "RemoveContainer" containerID="31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de"
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.970218 4757 scope.go:117] "RemoveContainer" containerID="e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3"
Oct 06 14:58:55 crc kubenswrapper[4757]: E1006 14:58:55.970778 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3\": container with ID starting with e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3 not found: ID does not exist" containerID="e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3"
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.970866 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3"} err="failed to get container status \"e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3\": rpc error: code = NotFound desc = could not find container \"e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3\": container with ID starting with e5a83edef221bb5061937dc3002f8a4e91735f2f305e107f7f21dd9a2790eed3 not found: ID does not exist"
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.970915 4757 scope.go:117] "RemoveContainer" containerID="1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2"
Oct 06 14:58:55 crc kubenswrapper[4757]: E1006 14:58:55.971303 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2\": container with ID starting with 1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2 not found: ID does not exist" containerID="1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2"
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.971360 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2"} err="failed to get container status \"1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2\": rpc error: code = NotFound desc = could not find container \"1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2\": container with ID starting with 1af88faf77c39b32328cb76782352f857fbcd63dc0e4d9353ec4abd68962ffa2 not found: ID does not exist"
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.971382 4757 scope.go:117] "RemoveContainer" containerID="31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de"
Oct 06 14:58:55 crc kubenswrapper[4757]: E1006 14:58:55.971698 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de\": container with ID starting with 31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de not found: ID does not exist" containerID="31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de"
Oct 06 14:58:55 crc kubenswrapper[4757]: I1006 14:58:55.971737 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de"} err="failed to get container status \"31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de\": rpc error: code = NotFound desc = could not find container \"31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de\": container with ID starting with 31e14067fb4d5d6b8fd3d5a6e0efd67f88854ceafef2c70c4e6e3ef902afd2de not found: ID does not exist"
Oct 06 14:58:56 crc kubenswrapper[4757]: I1006 14:58:56.195731 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3395743-c9ac-4836-afd5-1cbebeec41de" path="/var/lib/kubelet/pods/b3395743-c9ac-4836-afd5-1cbebeec41de/volumes"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.155758 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"]
Oct 06 15:00:00 crc kubenswrapper[4757]: E1006 15:00:00.157057 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerName="registry-server"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.157083 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerName="registry-server"
Oct 06 15:00:00 crc kubenswrapper[4757]: E1006 15:00:00.157159 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerName="extract-utilities"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.157172 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerName="extract-utilities"
Oct 06 15:00:00 crc kubenswrapper[4757]: E1006 15:00:00.157189 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerName="extract-content"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.157202 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerName="extract-content"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.157479 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3395743-c9ac-4836-afd5-1cbebeec41de" containerName="registry-server"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.158906 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.162526 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.163303 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.164546 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"]
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.360644 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5abcdb0-063d-4b8e-ae7a-c56912776067-secret-volume\") pod \"collect-profiles-29329380-7j8zl\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.360690 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5abcdb0-063d-4b8e-ae7a-c56912776067-config-volume\") pod \"collect-profiles-29329380-7j8zl\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.360725 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7tdx\" (UniqueName: \"kubernetes.io/projected/a5abcdb0-063d-4b8e-ae7a-c56912776067-kube-api-access-x7tdx\") pod \"collect-profiles-29329380-7j8zl\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.461973 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5abcdb0-063d-4b8e-ae7a-c56912776067-secret-volume\") pod \"collect-profiles-29329380-7j8zl\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.462037 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5abcdb0-063d-4b8e-ae7a-c56912776067-config-volume\") pod \"collect-profiles-29329380-7j8zl\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.462122 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7tdx\" (UniqueName: \"kubernetes.io/projected/a5abcdb0-063d-4b8e-ae7a-c56912776067-kube-api-access-x7tdx\") pod \"collect-profiles-29329380-7j8zl\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.463174 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5abcdb0-063d-4b8e-ae7a-c56912776067-config-volume\") pod \"collect-profiles-29329380-7j8zl\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.475992 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5abcdb0-063d-4b8e-ae7a-c56912776067-secret-volume\") pod \"collect-profiles-29329380-7j8zl\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.481180 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7tdx\" (UniqueName: \"kubernetes.io/projected/a5abcdb0-063d-4b8e-ae7a-c56912776067-kube-api-access-x7tdx\") pod \"collect-profiles-29329380-7j8zl\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:00 crc kubenswrapper[4757]: I1006 15:00:00.778408 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:01 crc kubenswrapper[4757]: I1006 15:00:01.010154 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"]
Oct 06 15:00:01 crc kubenswrapper[4757]: I1006 15:00:01.425450 4757 generic.go:334] "Generic (PLEG): container finished" podID="a5abcdb0-063d-4b8e-ae7a-c56912776067" containerID="361dca09b7dfe3865f4139915c480cefdff21eadb5d0f31701ee2ee82f4395ce" exitCode=0
Oct 06 15:00:01 crc kubenswrapper[4757]: I1006 15:00:01.425556 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl" event={"ID":"a5abcdb0-063d-4b8e-ae7a-c56912776067","Type":"ContainerDied","Data":"361dca09b7dfe3865f4139915c480cefdff21eadb5d0f31701ee2ee82f4395ce"}
Oct 06 15:00:01 crc kubenswrapper[4757]: I1006 15:00:01.425836 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl" event={"ID":"a5abcdb0-063d-4b8e-ae7a-c56912776067","Type":"ContainerStarted","Data":"1644adcf7967b7c404e2d352533f83052bf6a3bf0fa26418c5625470b9d99929"}
Oct 06 15:00:02 crc kubenswrapper[4757]: I1006 15:00:02.725671 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:02 crc kubenswrapper[4757]: I1006 15:00:02.902665 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5abcdb0-063d-4b8e-ae7a-c56912776067-secret-volume\") pod \"a5abcdb0-063d-4b8e-ae7a-c56912776067\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") "
Oct 06 15:00:02 crc kubenswrapper[4757]: I1006 15:00:02.902752 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5abcdb0-063d-4b8e-ae7a-c56912776067-config-volume\") pod \"a5abcdb0-063d-4b8e-ae7a-c56912776067\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") "
Oct 06 15:00:02 crc kubenswrapper[4757]: I1006 15:00:02.902831 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7tdx\" (UniqueName: \"kubernetes.io/projected/a5abcdb0-063d-4b8e-ae7a-c56912776067-kube-api-access-x7tdx\") pod \"a5abcdb0-063d-4b8e-ae7a-c56912776067\" (UID: \"a5abcdb0-063d-4b8e-ae7a-c56912776067\") "
Oct 06 15:00:02 crc kubenswrapper[4757]: I1006 15:00:02.903628 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5abcdb0-063d-4b8e-ae7a-c56912776067-config-volume" (OuterVolumeSpecName: "config-volume") pod "a5abcdb0-063d-4b8e-ae7a-c56912776067" (UID: "a5abcdb0-063d-4b8e-ae7a-c56912776067"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:00:02 crc kubenswrapper[4757]: I1006 15:00:02.911182 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5abcdb0-063d-4b8e-ae7a-c56912776067-kube-api-access-x7tdx" (OuterVolumeSpecName: "kube-api-access-x7tdx") pod "a5abcdb0-063d-4b8e-ae7a-c56912776067" (UID: "a5abcdb0-063d-4b8e-ae7a-c56912776067"). InnerVolumeSpecName "kube-api-access-x7tdx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 15:00:02 crc kubenswrapper[4757]: I1006 15:00:02.911315 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5abcdb0-063d-4b8e-ae7a-c56912776067-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a5abcdb0-063d-4b8e-ae7a-c56912776067" (UID: "a5abcdb0-063d-4b8e-ae7a-c56912776067"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 15:00:03 crc kubenswrapper[4757]: I1006 15:00:03.005220 4757 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a5abcdb0-063d-4b8e-ae7a-c56912776067-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 06 15:00:03 crc kubenswrapper[4757]: I1006 15:00:03.005268 4757 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a5abcdb0-063d-4b8e-ae7a-c56912776067-config-volume\") on node \"crc\" DevicePath \"\""
Oct 06 15:00:03 crc kubenswrapper[4757]: I1006 15:00:03.005282 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7tdx\" (UniqueName: \"kubernetes.io/projected/a5abcdb0-063d-4b8e-ae7a-c56912776067-kube-api-access-x7tdx\") on node \"crc\" DevicePath \"\""
Oct 06 15:00:03 crc kubenswrapper[4757]: I1006 15:00:03.457739 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl" event={"ID":"a5abcdb0-063d-4b8e-ae7a-c56912776067","Type":"ContainerDied","Data":"1644adcf7967b7c404e2d352533f83052bf6a3bf0fa26418c5625470b9d99929"}
Oct 06 15:00:03 crc kubenswrapper[4757]: I1006 15:00:03.458026 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1644adcf7967b7c404e2d352533f83052bf6a3bf0fa26418c5625470b9d99929"
Oct 06 15:00:03 crc kubenswrapper[4757]: I1006 15:00:03.458164 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329380-7j8zl"
Oct 06 15:00:03 crc kubenswrapper[4757]: I1006 15:00:03.805909 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw"]
Oct 06 15:00:03 crc kubenswrapper[4757]: I1006 15:00:03.812499 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329335-bckbw"]
Oct 06 15:00:04 crc kubenswrapper[4757]: I1006 15:00:04.189045 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08b0a55f-e859-4942-b4c3-438f06cfabc8" path="/var/lib/kubelet/pods/08b0a55f-e859-4942-b4c3-438f06cfabc8/volumes"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.966904 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6987b5fc5c-db74b"]
Oct 06 15:00:07 crc kubenswrapper[4757]: E1006 15:00:07.967587 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5abcdb0-063d-4b8e-ae7a-c56912776067" containerName="collect-profiles"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.967606 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5abcdb0-063d-4b8e-ae7a-c56912776067" containerName="collect-profiles"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.967772 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5abcdb0-063d-4b8e-ae7a-c56912776067" containerName="collect-profiles"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.968663 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.973993 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-hcffz"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.974202 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.974273 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.974371 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.977723 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bb76c85cc-wvwsg"]
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.982456 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.983470 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Oct 06 15:00:07 crc kubenswrapper[4757]: I1006 15:00:07.991268 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6987b5fc5c-db74b"]
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.008858 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bb76c85cc-wvwsg"]
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.082240 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-config\") pod \"dnsmasq-dns-6987b5fc5c-db74b\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") " pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.082321 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vxhb\" (UniqueName: \"kubernetes.io/projected/ba4ff376-b636-4f20-8167-916c0b2d2a22-kube-api-access-2vxhb\") pod \"dnsmasq-dns-6987b5fc5c-db74b\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") " pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.082418 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-config\") pod \"dnsmasq-dns-bb76c85cc-wvwsg\" (UID: \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\") " pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.082448 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqw54\" (UniqueName: \"kubernetes.io/projected/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-kube-api-access-vqw54\") pod \"dnsmasq-dns-bb76c85cc-wvwsg\" (UID: \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\") " pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.082482 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-dns-svc\") pod \"dnsmasq-dns-6987b5fc5c-db74b\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") " pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.183673 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-config\") pod \"dnsmasq-dns-bb76c85cc-wvwsg\" (UID: \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\") " pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.183738 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqw54\" (UniqueName: \"kubernetes.io/projected/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-kube-api-access-vqw54\") pod \"dnsmasq-dns-bb76c85cc-wvwsg\" (UID: \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\") " pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.183768 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-dns-svc\") pod \"dnsmasq-dns-6987b5fc5c-db74b\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") " pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.183819 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vxhb\" (UniqueName: \"kubernetes.io/projected/ba4ff376-b636-4f20-8167-916c0b2d2a22-kube-api-access-2vxhb\") pod \"dnsmasq-dns-6987b5fc5c-db74b\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") " pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.183836 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-config\") pod \"dnsmasq-dns-6987b5fc5c-db74b\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") " pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.184518 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-config\") pod \"dnsmasq-dns-bb76c85cc-wvwsg\" (UID: \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\") " pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.184954 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-dns-svc\") pod \"dnsmasq-dns-6987b5fc5c-db74b\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") " pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.185141 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-config\") pod \"dnsmasq-dns-6987b5fc5c-db74b\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") " pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.239792 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6987b5fc5c-db74b"]
Oct 06 15:00:08 crc kubenswrapper[4757]: E1006 15:00:08.240395 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-2vxhb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-6987b5fc5c-db74b" podUID="ba4ff376-b636-4f20-8167-916c0b2d2a22"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.259164 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vxhb\" (UniqueName: \"kubernetes.io/projected/ba4ff376-b636-4f20-8167-916c0b2d2a22-kube-api-access-2vxhb\") pod \"dnsmasq-dns-6987b5fc5c-db74b\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") " pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.259932 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqw54\" (UniqueName: \"kubernetes.io/projected/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-kube-api-access-vqw54\") pod \"dnsmasq-dns-bb76c85cc-wvwsg\" (UID: \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\") " pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.265473 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f54d5d49-cwx7t"]
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.266774 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.270693 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f54d5d49-cwx7t"]
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.285800 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-dns-svc\") pod \"dnsmasq-dns-74f54d5d49-cwx7t\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.285898 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-config\") pod \"dnsmasq-dns-74f54d5d49-cwx7t\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.285934 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4tnp\" (UniqueName: \"kubernetes.io/projected/f866e41e-f393-40c6-9b82-c0e4eaeb4283-kube-api-access-b4tnp\") pod \"dnsmasq-dns-74f54d5d49-cwx7t\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.334323 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.387014 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-dns-svc\") pod \"dnsmasq-dns-74f54d5d49-cwx7t\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.387144 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-config\") pod \"dnsmasq-dns-74f54d5d49-cwx7t\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.387174 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4tnp\" (UniqueName: \"kubernetes.io/projected/f866e41e-f393-40c6-9b82-c0e4eaeb4283-kube-api-access-b4tnp\") pod \"dnsmasq-dns-74f54d5d49-cwx7t\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.387826 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-dns-svc\") pod \"dnsmasq-dns-74f54d5d49-cwx7t\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.388007 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-config\") pod \"dnsmasq-dns-74f54d5d49-cwx7t\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.408931 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4tnp\" (UniqueName: \"kubernetes.io/projected/f866e41e-f393-40c6-9b82-c0e4eaeb4283-kube-api-access-b4tnp\") pod \"dnsmasq-dns-74f54d5d49-cwx7t\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.502396 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.520528 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.549484 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bb76c85cc-wvwsg"]
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.579152 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cc4f76bbf-f8knj"]
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.580415 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.583808 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cc4f76bbf-f8knj"]
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.606162 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.691242 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-dns-svc\") pod \"ba4ff376-b636-4f20-8167-916c0b2d2a22\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") "
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.691310 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-config\") pod \"ba4ff376-b636-4f20-8167-916c0b2d2a22\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") "
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.691407 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vxhb\" (UniqueName: \"kubernetes.io/projected/ba4ff376-b636-4f20-8167-916c0b2d2a22-kube-api-access-2vxhb\") pod \"ba4ff376-b636-4f20-8167-916c0b2d2a22\" (UID: \"ba4ff376-b636-4f20-8167-916c0b2d2a22\") "
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.691807 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ba4ff376-b636-4f20-8167-916c0b2d2a22" (UID: "ba4ff376-b636-4f20-8167-916c0b2d2a22"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.692081 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-config" (OuterVolumeSpecName: "config") pod "ba4ff376-b636-4f20-8167-916c0b2d2a22" (UID: "ba4ff376-b636-4f20-8167-916c0b2d2a22"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.692323 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-config\") pod \"dnsmasq-dns-5cc4f76bbf-f8knj\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.692368 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-dns-svc\") pod \"dnsmasq-dns-5cc4f76bbf-f8knj\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.692393 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh6xg\" (UniqueName: \"kubernetes.io/projected/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-kube-api-access-nh6xg\") pod \"dnsmasq-dns-5cc4f76bbf-f8knj\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.692445 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.692460 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba4ff376-b636-4f20-8167-916c0b2d2a22-config\") on node \"crc\" DevicePath \"\""
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.697758 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba4ff376-b636-4f20-8167-916c0b2d2a22-kube-api-access-2vxhb" (OuterVolumeSpecName: "kube-api-access-2vxhb") pod "ba4ff376-b636-4f20-8167-916c0b2d2a22" (UID: "ba4ff376-b636-4f20-8167-916c0b2d2a22"). InnerVolumeSpecName "kube-api-access-2vxhb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.794896 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-config\") pod \"dnsmasq-dns-5cc4f76bbf-f8knj\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.795997 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-dns-svc\") pod \"dnsmasq-dns-5cc4f76bbf-f8knj\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.796019 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh6xg\" (UniqueName: \"kubernetes.io/projected/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-kube-api-access-nh6xg\") pod \"dnsmasq-dns-5cc4f76bbf-f8knj\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.796187 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vxhb\" (UniqueName: \"kubernetes.io/projected/ba4ff376-b636-4f20-8167-916c0b2d2a22-kube-api-access-2vxhb\") on node \"crc\" DevicePath \"\""
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.795821 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-config\") pod \"dnsmasq-dns-5cc4f76bbf-f8knj\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.797032 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-dns-svc\") pod \"dnsmasq-dns-5cc4f76bbf-f8knj\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.813654 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh6xg\" (UniqueName: \"kubernetes.io/projected/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-kube-api-access-nh6xg\") pod \"dnsmasq-dns-5cc4f76bbf-f8knj\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.852959 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bb76c85cc-wvwsg"]
Oct 06 15:00:08 crc kubenswrapper[4757]: I1006 15:00:08.908079 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.033299 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f54d5d49-cwx7t"]
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.340356 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cc4f76bbf-f8knj"]
Oct 06 15:00:09 crc kubenswrapper[4757]: W1006 15:00:09.343476 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec6b46ec_9e74_4432_992c_e5a92a8d38e3.slice/crio-7960783aab4501d0e9a072c90894f43ddca613e9bfb66fbb165ac2d965806718 WatchSource:0}: Error finding container 7960783aab4501d0e9a072c90894f43ddca613e9bfb66fbb165ac2d965806718: Status 404 returned error can't find the container with id 7960783aab4501d0e9a072c90894f43ddca613e9bfb66fbb165ac2d965806718
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.435447 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.437334 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.439291 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.439907 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.440059 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-wdvcn"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.440277 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.440337 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.441178 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.441367 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.444456 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.508595 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" event={"ID":"ec6b46ec-9e74-4432-992c-e5a92a8d38e3","Type":"ContainerStarted","Data":"7960783aab4501d0e9a072c90894f43ddca613e9bfb66fbb165ac2d965806718"}
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.510724 4757 generic.go:334] "Generic (PLEG): container finished" podID="f6ac60de-1f61-419a-86db-ddd4bba0b2b3" containerID="0e7fc0600a8f23efcd335ea5c63494a58aeb3da4bcf48d24a668de2f0d1a8cae" exitCode=0
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.510799 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg" event={"ID":"f6ac60de-1f61-419a-86db-ddd4bba0b2b3","Type":"ContainerDied","Data":"0e7fc0600a8f23efcd335ea5c63494a58aeb3da4bcf48d24a668de2f0d1a8cae"}
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.510817 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg" event={"ID":"f6ac60de-1f61-419a-86db-ddd4bba0b2b3","Type":"ContainerStarted","Data":"37e8d846d8a3e47f4739b2be5bb6d70c78a0b497e605115564bfb16d4ce48c9a"}
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.512539 4757 generic.go:334] "Generic (PLEG): container finished" podID="f866e41e-f393-40c6-9b82-c0e4eaeb4283" containerID="b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd" exitCode=0
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.512636 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" event={"ID":"f866e41e-f393-40c6-9b82-c0e4eaeb4283","Type":"ContainerDied","Data":"b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd"}
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.512690 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" event={"ID":"f866e41e-f393-40c6-9b82-c0e4eaeb4283","Type":"ContainerStarted","Data":"f5595d94ce498d89235c384af4e40792d32833bb20b8f4ccab713edcf86b7276"}
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.512803 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6987b5fc5c-db74b"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.606227 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.606501 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.606638 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.606719 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.606805 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtwzm\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-kube-api-access-dtwzm\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.606961 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.607122 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.607258 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.607388 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.607496 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.607602 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.612781 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6987b5fc5c-db74b"]
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.618798 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6987b5fc5c-db74b"]
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710204 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710598 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710619 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710649 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710680 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710694 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710703 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710710 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710803 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtwzm\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-kube-api-access-dtwzm\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710824 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710872 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.710944 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.712449 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.713069 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.713077 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.715212 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.716636 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.718127 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.719086 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.719725 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.719965 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.720188 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-5cfkr"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.720357 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.720878 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.721203 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.721685 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.721719 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/41db9b64eefef90e622c0e26edee1493aa233875f0edd02f5e522320b4849b35/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.722522 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.723395 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.725783 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.732847 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtwzm\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-kube-api-access-dtwzm\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.737524 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.741075 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.784800 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") pod \"rabbitmq-cell1-server-0\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.823585 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.913514 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.913565 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.913666 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.913687 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-config-data\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.913702 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mr99\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-kube-api-access-9mr99\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.913735 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.913752 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.913769 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.913788 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.914042 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:09 crc kubenswrapper[4757]: I1006 15:00:09.914117 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016002 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqw54\" (UniqueName: \"kubernetes.io/projected/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-kube-api-access-vqw54\") pod \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\" (UID: \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\") "
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016071 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-config\") pod \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\" (UID: \"f6ac60de-1f61-419a-86db-ddd4bba0b2b3\") "
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016515 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016541 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-config-data\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016560 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mr99\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-kube-api-access-9mr99\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016595 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016612 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016630 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016646 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016667 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016691 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016719 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.016739 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.017672 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.017692 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-config-data\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.017743 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.018355 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.018711 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.019933 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.019962 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3cb94c976d07b4f2fb2ae0d8389c4b97a0c7ef82bc9dd694a37eee44b292797e/globalmount\"" pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.020197 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0"
Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.021347 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-kube-api-access-vqw54" (OuterVolumeSpecName: "kube-api-access-vqw54") pod "f6ac60de-1f61-419a-86db-ddd4bba0b2b3" (UID: "f6ac60de-1f61-419a-86db-ddd4bba0b2b3"). InnerVolumeSpecName "kube-api-access-vqw54".
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.021642 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.021991 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.024153 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.033889 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mr99\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-kube-api-access-9mr99\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.051658 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") pod \"rabbitmq-server-0\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " pod="openstack/rabbitmq-server-0" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.060654 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-config" (OuterVolumeSpecName: "config") pod "f6ac60de-1f61-419a-86db-ddd4bba0b2b3" (UID: "f6ac60de-1f61-419a-86db-ddd4bba0b2b3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.078646 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.118207 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqw54\" (UniqueName: \"kubernetes.io/projected/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-kube-api-access-vqw54\") on node \"crc\" DevicePath \"\"" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.118257 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6ac60de-1f61-419a-86db-ddd4bba0b2b3-config\") on node \"crc\" DevicePath \"\"" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.198834 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba4ff376-b636-4f20-8167-916c0b2d2a22" path="/var/lib/kubelet/pods/ba4ff376-b636-4f20-8167-916c0b2d2a22/volumes" Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.331347 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 15:00:10 crc kubenswrapper[4757]: W1006 15:00:10.333209 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d9fa0d1_9bdd_44f2_aa80_0037e7249d51.slice/crio-d0981f7274cbfd12f39272ff0c618dc26726edef92561711c24b440b1e642cf2 WatchSource:0}: Error finding container d0981f7274cbfd12f39272ff0c618dc26726edef92561711c24b440b1e642cf2: Status 404 returned error can't find the container with id d0981f7274cbfd12f39272ff0c618dc26726edef92561711c24b440b1e642cf2 Oct 06 15:00:10 crc kubenswrapper[4757]: I1006 15:00:10.344588 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.524318 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" event={"ID":"f866e41e-f393-40c6-9b82-c0e4eaeb4283","Type":"ContainerStarted","Data":"181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8"} Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.524735 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.527493 4757 generic.go:334] "Generic (PLEG): container finished" podID="ec6b46ec-9e74-4432-992c-e5a92a8d38e3" containerID="553995ace36a30f6b134e1b91667dc6dbff12672f1243ef6e7ee6d6c8bc14e6d" exitCode=0 Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.527541 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" event={"ID":"ec6b46ec-9e74-4432-992c-e5a92a8d38e3","Type":"ContainerDied","Data":"553995ace36a30f6b134e1b91667dc6dbff12672f1243ef6e7ee6d6c8bc14e6d"} Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.531513 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.531490 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bb76c85cc-wvwsg" event={"ID":"f6ac60de-1f61-419a-86db-ddd4bba0b2b3","Type":"ContainerDied","Data":"37e8d846d8a3e47f4739b2be5bb6d70c78a0b497e605115564bfb16d4ce48c9a"} Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.531630 4757 scope.go:117] "RemoveContainer" containerID="0e7fc0600a8f23efcd335ea5c63494a58aeb3da4bcf48d24a668de2f0d1a8cae" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.533363 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51","Type":"ContainerStarted","Data":"d0981f7274cbfd12f39272ff0c618dc26726edef92561711c24b440b1e642cf2"} Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.541886 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" podStartSLOduration=2.541869722 podStartE2EDuration="2.541869722s" podCreationTimestamp="2025-10-06 15:00:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:00:10.540439515 +0000 UTC m=+4899.037758062" watchObservedRunningTime="2025-10-06 15:00:10.541869722 +0000 UTC m=+4899.039188259" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.602068 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bb76c85cc-wvwsg"] Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.607616 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bb76c85cc-wvwsg"] Oct 06 15:00:11 crc kubenswrapper[4757]: W1006 15:00:10.783054 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ee79e18_42a9_4a45_81e7_5a0ca8a6a841.slice/crio-fee9be4d2079a615d391a716f8ad1459ebdf12ee0d818dd61a60b415abec1320 WatchSource:0}: Error finding container fee9be4d2079a615d391a716f8ad1459ebdf12ee0d818dd61a60b415abec1320: Status 404 returned error can't find the container with id fee9be4d2079a615d391a716f8ad1459ebdf12ee0d818dd61a60b415abec1320 Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.784278 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.883808 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 06 15:00:11 crc kubenswrapper[4757]: E1006 15:00:10.884110 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6ac60de-1f61-419a-86db-ddd4bba0b2b3" containerName="init" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.884122 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6ac60de-1f61-419a-86db-ddd4bba0b2b3" containerName="init" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.884258 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6ac60de-1f61-419a-86db-ddd4bba0b2b3" containerName="init" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.884991 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.886899 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-r9xp5" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.886927 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.887363 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.887935 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.898177 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.903794 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:10.906309 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.032357 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34dcf335-306d-480f-ac3e-cee391886957-kolla-config\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.032415 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34dcf335-306d-480f-ac3e-cee391886957-config-data-default\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.032440 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34dcf335-306d-480f-ac3e-cee391886957-operator-scripts\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.032593 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34dcf335-306d-480f-ac3e-cee391886957-config-data-generated\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.032651 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/34dcf335-306d-480f-ac3e-cee391886957-secrets\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.032671 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btrmg\" (UniqueName: \"kubernetes.io/projected/34dcf335-306d-480f-ac3e-cee391886957-kube-api-access-btrmg\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " 
pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.032719 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34dcf335-306d-480f-ac3e-cee391886957-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.032809 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c12416b7-b97f-4fdf-ba55-359a0078e86d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c12416b7-b97f-4fdf-ba55-359a0078e86d\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.032876 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34dcf335-306d-480f-ac3e-cee391886957-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.134426 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34dcf335-306d-480f-ac3e-cee391886957-config-data-default\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.134844 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34dcf335-306d-480f-ac3e-cee391886957-operator-scripts\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.134884 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34dcf335-306d-480f-ac3e-cee391886957-config-data-generated\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.134910 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/34dcf335-306d-480f-ac3e-cee391886957-secrets\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.134940 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btrmg\" (UniqueName: \"kubernetes.io/projected/34dcf335-306d-480f-ac3e-cee391886957-kube-api-access-btrmg\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.134962 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34dcf335-306d-480f-ac3e-cee391886957-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.135013 
4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c12416b7-b97f-4fdf-ba55-359a0078e86d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c12416b7-b97f-4fdf-ba55-359a0078e86d\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.135044 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34dcf335-306d-480f-ac3e-cee391886957-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.135115 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34dcf335-306d-480f-ac3e-cee391886957-kolla-config\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.135375 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34dcf335-306d-480f-ac3e-cee391886957-config-data-generated\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.136069 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34dcf335-306d-480f-ac3e-cee391886957-kolla-config\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.136460 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34dcf335-306d-480f-ac3e-cee391886957-config-data-default\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.136506 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34dcf335-306d-480f-ac3e-cee391886957-operator-scripts\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.141171 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.141215 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c12416b7-b97f-4fdf-ba55-359a0078e86d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c12416b7-b97f-4fdf-ba55-359a0078e86d\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/072468ff0d6ed5e35ac77f8b02ca3fc1f704019f319763acc2a868029596a115/globalmount\"" pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.145406 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34dcf335-306d-480f-ac3e-cee391886957-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.146632 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34dcf335-306d-480f-ac3e-cee391886957-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.157236 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/34dcf335-306d-480f-ac3e-cee391886957-secrets\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.247596 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btrmg\" (UniqueName: \"kubernetes.io/projected/34dcf335-306d-480f-ac3e-cee391886957-kube-api-access-btrmg\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.452900 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c12416b7-b97f-4fdf-ba55-359a0078e86d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c12416b7-b97f-4fdf-ba55-359a0078e86d\") pod \"openstack-galera-0\" (UID: \"34dcf335-306d-480f-ac3e-cee391886957\") " pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.505770 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.541648 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841","Type":"ContainerStarted","Data":"fee9be4d2079a615d391a716f8ad1459ebdf12ee0d818dd61a60b415abec1320"} Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.546247 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" event={"ID":"ec6b46ec-9e74-4432-992c-e5a92a8d38e3","Type":"ContainerStarted","Data":"1c3cc55e334173778a603da46bc6dfb0988e11216491656f517c5fb341c8f5af"} Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.546296 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.916013 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" podStartSLOduration=3.915991257 podStartE2EDuration="3.915991257s" podCreationTimestamp="2025-10-06 15:00:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:00:11.576504137 +0000 UTC m=+4900.073822674" watchObservedRunningTime="2025-10-06 15:00:11.915991257 +0000 UTC m=+4900.413309804" Oct 06 15:00:11 crc kubenswrapper[4757]: I1006 15:00:11.924455 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.178037 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.179493 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.181847 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.181890 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-thcfq" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.182073 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.182347 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.192640 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6ac60de-1f61-419a-86db-ddd4bba0b2b3" path="/var/lib/kubelet/pods/f6ac60de-1f61-419a-86db-ddd4bba0b2b3/volumes" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.203683 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.354687 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.355928 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.356011 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjw5f\" (UniqueName: \"kubernetes.io/projected/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-kube-api-access-hjw5f\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.356108 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.356229 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.356272 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-secrets\") pod \"openstack-cell1-galera-0\" (UID: 
\"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.356298 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.356499 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.356588 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c7c2cc50-2fa5-4568-ac39-8dd5e6eaeb6f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c7c2cc50-2fa5-4568-ac39-8dd5e6eaeb6f\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.457699 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.457822 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.457880 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c7c2cc50-2fa5-4568-ac39-8dd5e6eaeb6f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c7c2cc50-2fa5-4568-ac39-8dd5e6eaeb6f\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.457934 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.458000 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.458039 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjw5f\" (UniqueName: 
\"kubernetes.io/projected/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-kube-api-access-hjw5f\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.458084 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.458173 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.458211 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.458786 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.458902 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.459029 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.459373 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.461524 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.462052 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: 
\"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.464036 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.464135 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.464168 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c7c2cc50-2fa5-4568-ac39-8dd5e6eaeb6f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c7c2cc50-2fa5-4568-ac39-8dd5e6eaeb6f\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0b4cf2fd6660e30f93cb81d97ea2ea51b3b9b051ea6bf43e6906bf4b336f38b2/globalmount\"" pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.486564 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.488504 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.489924 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjw5f\" (UniqueName: \"kubernetes.io/projected/ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e-kube-api-access-hjw5f\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.491481 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.491649 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-f22hf" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.491788 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.503637 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.509349 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c7c2cc50-2fa5-4568-ac39-8dd5e6eaeb6f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c7c2cc50-2fa5-4568-ac39-8dd5e6eaeb6f\") pod \"openstack-cell1-galera-0\" (UID: \"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e\") " pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.551475 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"34dcf335-306d-480f-ac3e-cee391886957","Type":"ContainerStarted","Data":"ed7b66266f620e8ea41a9075eb5565db34701eff501c89cae1cf085de5aa9609"} Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.551513 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"34dcf335-306d-480f-ac3e-cee391886957","Type":"ContainerStarted","Data":"728afa5042158a995ed429ef0238edd2b357f251e481b3dc2a27842e423fdde7"} Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.553277 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841","Type":"ContainerStarted","Data":"4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449"} Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.554598 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51","Type":"ContainerStarted","Data":"a872d004b4d06df5aa1bdcccab07ae0d9ebf46963a551bddc81a482db28c1328"} Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.660755 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/beb86ee0-f17c-4c14-8e47-5dca77324eaa-memcached-tls-certs\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.660849 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/beb86ee0-f17c-4c14-8e47-5dca77324eaa-config-data\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.661307 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx52x\" (UniqueName: \"kubernetes.io/projected/beb86ee0-f17c-4c14-8e47-5dca77324eaa-kube-api-access-bx52x\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.661701 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/beb86ee0-f17c-4c14-8e47-5dca77324eaa-kolla-config\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.661738 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beb86ee0-f17c-4c14-8e47-5dca77324eaa-combined-ca-bundle\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.763284 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/beb86ee0-f17c-4c14-8e47-5dca77324eaa-memcached-tls-certs\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.763350 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/beb86ee0-f17c-4c14-8e47-5dca77324eaa-config-data\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.763396 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx52x\" (UniqueName: 
\"kubernetes.io/projected/beb86ee0-f17c-4c14-8e47-5dca77324eaa-kube-api-access-bx52x\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.763434 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/beb86ee0-f17c-4c14-8e47-5dca77324eaa-kolla-config\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.763474 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beb86ee0-f17c-4c14-8e47-5dca77324eaa-combined-ca-bundle\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.764784 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/beb86ee0-f17c-4c14-8e47-5dca77324eaa-config-data\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.765064 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/beb86ee0-f17c-4c14-8e47-5dca77324eaa-kolla-config\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.767667 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/beb86ee0-f17c-4c14-8e47-5dca77324eaa-memcached-tls-certs\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.769080 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/beb86ee0-f17c-4c14-8e47-5dca77324eaa-combined-ca-bundle\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.784324 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx52x\" (UniqueName: \"kubernetes.io/projected/beb86ee0-f17c-4c14-8e47-5dca77324eaa-kube-api-access-bx52x\") pod \"memcached-0\" (UID: \"beb86ee0-f17c-4c14-8e47-5dca77324eaa\") " pod="openstack/memcached-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.803364 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:12 crc kubenswrapper[4757]: I1006 15:00:12.843403 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 06 15:00:13 crc kubenswrapper[4757]: W1006 15:00:13.251582 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca425bb4_93e3_4d5c_bf23_d7d8f8bb1c8e.slice/crio-b9c3681bab243712f0db7ba10096a219844928f17b08d3a5064d1a07b51e83e6 WatchSource:0}: Error finding container b9c3681bab243712f0db7ba10096a219844928f17b08d3a5064d1a07b51e83e6: Status 404 returned error can't find the container with id b9c3681bab243712f0db7ba10096a219844928f17b08d3a5064d1a07b51e83e6 Oct 06 15:00:13 crc kubenswrapper[4757]: I1006 15:00:13.254656 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 06 15:00:13 crc kubenswrapper[4757]: I1006 15:00:13.304602 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 06 15:00:13 crc kubenswrapper[4757]: W1006 15:00:13.318784 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbeb86ee0_f17c_4c14_8e47_5dca77324eaa.slice/crio-3da6d5277215545772a828dcf319c895de9f1669946a1b4e95e9d4ead06591df WatchSource:0}: Error finding container 3da6d5277215545772a828dcf319c895de9f1669946a1b4e95e9d4ead06591df: Status 404 returned error can't find the container with id 3da6d5277215545772a828dcf319c895de9f1669946a1b4e95e9d4ead06591df Oct 06 15:00:13 crc kubenswrapper[4757]: I1006 15:00:13.563539 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"beb86ee0-f17c-4c14-8e47-5dca77324eaa","Type":"ContainerStarted","Data":"0a43fef518e79ef018ff27acf71ffb23fa254fd666f95ecb22b63fab5a0eb4e0"} Oct 06 15:00:13 crc kubenswrapper[4757]: I1006 15:00:13.563884 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"beb86ee0-f17c-4c14-8e47-5dca77324eaa","Type":"ContainerStarted","Data":"3da6d5277215545772a828dcf319c895de9f1669946a1b4e95e9d4ead06591df"} Oct 06 15:00:13 crc kubenswrapper[4757]: I1006 15:00:13.563919 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 06 15:00:13 crc kubenswrapper[4757]: I1006 15:00:13.565416 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e","Type":"ContainerStarted","Data":"9f49dd08f87ed89596b05198492fefcc9a6ee01b3eb6f785c73574bbe618fc27"} Oct 06 15:00:13 crc kubenswrapper[4757]: I1006 15:00:13.565441 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e","Type":"ContainerStarted","Data":"b9c3681bab243712f0db7ba10096a219844928f17b08d3a5064d1a07b51e83e6"} Oct 06 15:00:13 crc kubenswrapper[4757]: I1006 15:00:13.585394 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=1.5853773439999999 podStartE2EDuration="1.585377344s" podCreationTimestamp="2025-10-06 15:00:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:00:13.580226997 +0000 UTC m=+4902.077545604" watchObservedRunningTime="2025-10-06 15:00:13.585377344 +0000 UTC m=+4902.082695881" Oct 06 15:00:15 crc kubenswrapper[4757]: I1006 15:00:15.587086 4757 generic.go:334] "Generic (PLEG): container finished" podID="34dcf335-306d-480f-ac3e-cee391886957" 
containerID="ed7b66266f620e8ea41a9075eb5565db34701eff501c89cae1cf085de5aa9609" exitCode=0 Oct 06 15:00:15 crc kubenswrapper[4757]: I1006 15:00:15.587198 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"34dcf335-306d-480f-ac3e-cee391886957","Type":"ContainerDied","Data":"ed7b66266f620e8ea41a9075eb5565db34701eff501c89cae1cf085de5aa9609"} Oct 06 15:00:16 crc kubenswrapper[4757]: I1006 15:00:16.598607 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"34dcf335-306d-480f-ac3e-cee391886957","Type":"ContainerStarted","Data":"5cd1690baebb4233eda80a61cb29a4a3edbd656757f6d443108a85bc7fbcc38e"} Oct 06 15:00:16 crc kubenswrapper[4757]: I1006 15:00:16.630417 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.630377034 podStartE2EDuration="7.630377034s" podCreationTimestamp="2025-10-06 15:00:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:00:16.62343569 +0000 UTC m=+4905.120754277" watchObservedRunningTime="2025-10-06 15:00:16.630377034 +0000 UTC m=+4905.127695571" Oct 06 15:00:17 crc kubenswrapper[4757]: I1006 15:00:17.611415 4757 generic.go:334] "Generic (PLEG): container finished" podID="ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e" containerID="9f49dd08f87ed89596b05198492fefcc9a6ee01b3eb6f785c73574bbe618fc27" exitCode=0 Oct 06 15:00:17 crc kubenswrapper[4757]: I1006 15:00:17.611518 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e","Type":"ContainerDied","Data":"9f49dd08f87ed89596b05198492fefcc9a6ee01b3eb6f785c73574bbe618fc27"} Oct 06 15:00:18 crc kubenswrapper[4757]: I1006 15:00:18.608275 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" Oct 06 15:00:18 crc kubenswrapper[4757]: I1006 15:00:18.621530 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e","Type":"ContainerStarted","Data":"6d58bb8d163ebb87bf5b3b06c113e0727c94ee1ba56257b20caee8a3476fdb58"} Oct 06 15:00:18 crc kubenswrapper[4757]: I1006 15:00:18.669512 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.669482799 podStartE2EDuration="7.669482799s" podCreationTimestamp="2025-10-06 15:00:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:00:18.664857439 +0000 UTC m=+4907.162176066" watchObservedRunningTime="2025-10-06 15:00:18.669482799 +0000 UTC m=+4907.166801376" Oct 06 15:00:18 crc kubenswrapper[4757]: I1006 15:00:18.910309 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" Oct 06 15:00:18 crc kubenswrapper[4757]: I1006 15:00:18.967274 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f54d5d49-cwx7t"] Oct 06 15:00:18 crc kubenswrapper[4757]: I1006 15:00:18.967553 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" podUID="f866e41e-f393-40c6-9b82-c0e4eaeb4283" containerName="dnsmasq-dns" 
containerID="cri-o://181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8" gracePeriod=10 Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.392216 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.474709 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4tnp\" (UniqueName: \"kubernetes.io/projected/f866e41e-f393-40c6-9b82-c0e4eaeb4283-kube-api-access-b4tnp\") pod \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.474766 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-dns-svc\") pod \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.474947 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-config\") pod \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\" (UID: \"f866e41e-f393-40c6-9b82-c0e4eaeb4283\") " Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.486218 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f866e41e-f393-40c6-9b82-c0e4eaeb4283-kube-api-access-b4tnp" (OuterVolumeSpecName: "kube-api-access-b4tnp") pod "f866e41e-f393-40c6-9b82-c0e4eaeb4283" (UID: "f866e41e-f393-40c6-9b82-c0e4eaeb4283"). InnerVolumeSpecName "kube-api-access-b4tnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.506788 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-config" (OuterVolumeSpecName: "config") pod "f866e41e-f393-40c6-9b82-c0e4eaeb4283" (UID: "f866e41e-f393-40c6-9b82-c0e4eaeb4283"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.514043 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f866e41e-f393-40c6-9b82-c0e4eaeb4283" (UID: "f866e41e-f393-40c6-9b82-c0e4eaeb4283"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.577114 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4tnp\" (UniqueName: \"kubernetes.io/projected/f866e41e-f393-40c6-9b82-c0e4eaeb4283-kube-api-access-b4tnp\") on node \"crc\" DevicePath \"\"" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.577217 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.577235 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f866e41e-f393-40c6-9b82-c0e4eaeb4283-config\") on node \"crc\" DevicePath \"\"" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.630939 4757 generic.go:334] "Generic (PLEG): container finished" podID="f866e41e-f393-40c6-9b82-c0e4eaeb4283" containerID="181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8" exitCode=0 Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.630994 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" event={"ID":"f866e41e-f393-40c6-9b82-c0e4eaeb4283","Type":"ContainerDied","Data":"181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8"} Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.631032 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" event={"ID":"f866e41e-f393-40c6-9b82-c0e4eaeb4283","Type":"ContainerDied","Data":"f5595d94ce498d89235c384af4e40792d32833bb20b8f4ccab713edcf86b7276"} Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.631059 4757 scope.go:117] "RemoveContainer" containerID="181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.631242 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f54d5d49-cwx7t" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.659089 4757 scope.go:117] "RemoveContainer" containerID="b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.670406 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f54d5d49-cwx7t"] Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.679467 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f54d5d49-cwx7t"] Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.686810 4757 scope.go:117] "RemoveContainer" containerID="181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8" Oct 06 15:00:19 crc kubenswrapper[4757]: E1006 15:00:19.687398 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8\": container with ID starting with 181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8 not found: ID does not exist" containerID="181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.687545 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8"} err="failed to get container status \"181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8\": rpc error: code = NotFound desc = could not find container \"181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8\": container with ID starting with 181d98753ace49d0efebf79c4afc332d15f59e1c132d91d483ed5ad0453ac2a8 not found: ID does not exist" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.687645 4757 scope.go:117] "RemoveContainer" containerID="b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd" Oct 06 15:00:19 crc kubenswrapper[4757]: E1006 15:00:19.689130 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd\": container with ID starting with b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd not found: ID does not exist" containerID="b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd" Oct 06 15:00:19 crc kubenswrapper[4757]: I1006 15:00:19.689249 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd"} err="failed to get container status \"b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd\": rpc error: code = NotFound desc = could not find container \"b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd\": container with ID starting with b203c8c41bbcfd467d4951c482b558b9da9cc48e2d590f4e6d7cdd00feb20afd not found: ID does not exist" Oct 06 15:00:20 crc kubenswrapper[4757]: I1006 15:00:20.194840 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f866e41e-f393-40c6-9b82-c0e4eaeb4283" path="/var/lib/kubelet/pods/f866e41e-f393-40c6-9b82-c0e4eaeb4283/volumes" Oct 06 15:00:20 crc kubenswrapper[4757]: E1006 15:00:20.493905 4757 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.199:52292->38.102.83.199:42407: write tcp 38.102.83.199:52292->38.102.83.199:42407: write: 
connection reset by peer Oct 06 15:00:21 crc kubenswrapper[4757]: I1006 15:00:21.506611 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 06 15:00:21 crc kubenswrapper[4757]: I1006 15:00:21.507491 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 06 15:00:22 crc kubenswrapper[4757]: I1006 15:00:22.804422 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:22 crc kubenswrapper[4757]: I1006 15:00:22.804866 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:22 crc kubenswrapper[4757]: I1006 15:00:22.846425 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 06 15:00:23 crc kubenswrapper[4757]: I1006 15:00:23.584724 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 06 15:00:23 crc kubenswrapper[4757]: I1006 15:00:23.633435 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 06 15:00:24 crc kubenswrapper[4757]: I1006 15:00:24.888148 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:24 crc kubenswrapper[4757]: I1006 15:00:24.956150 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 06 15:00:34 crc kubenswrapper[4757]: I1006 15:00:34.361033 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 15:00:34 crc kubenswrapper[4757]: I1006 15:00:34.361807 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 15:00:37 crc kubenswrapper[4757]: I1006 15:00:37.288385 4757 scope.go:117] "RemoveContainer" containerID="863f7f23ac6fdd7c290d83cb3070f8f0e6f10ccdb3df54377ffade28ecd84f7e" Oct 06 15:00:43 crc kubenswrapper[4757]: I1006 15:00:43.840080 4757 generic.go:334] "Generic (PLEG): container finished" podID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" containerID="a872d004b4d06df5aa1bdcccab07ae0d9ebf46963a551bddc81a482db28c1328" exitCode=0 Oct 06 15:00:43 crc kubenswrapper[4757]: I1006 15:00:43.840139 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51","Type":"ContainerDied","Data":"a872d004b4d06df5aa1bdcccab07ae0d9ebf46963a551bddc81a482db28c1328"} Oct 06 15:00:44 crc kubenswrapper[4757]: I1006 15:00:44.849358 4757 generic.go:334] "Generic (PLEG): container finished" podID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" containerID="4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449" exitCode=0 Oct 06 15:00:44 crc kubenswrapper[4757]: I1006 15:00:44.849470 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841","Type":"ContainerDied","Data":"4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449"} Oct 06 15:00:44 crc kubenswrapper[4757]: I1006 15:00:44.852214 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51","Type":"ContainerStarted","Data":"cdbf66a122d8d0d2ccec5182d632f2b165511b0fd732f2d33a4df51e4425ac83"} Oct 06 15:00:44 crc kubenswrapper[4757]: I1006 15:00:44.852438 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:00:44 crc kubenswrapper[4757]: I1006 15:00:44.941527 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.941502039 podStartE2EDuration="36.941502039s" podCreationTimestamp="2025-10-06 15:00:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:00:44.938913365 +0000 UTC m=+4933.436231932" watchObservedRunningTime="2025-10-06 15:00:44.941502039 +0000 UTC m=+4933.438820586" Oct 06 15:00:45 crc kubenswrapper[4757]: I1006 15:00:45.863451 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841","Type":"ContainerStarted","Data":"46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef"} Oct 06 15:00:45 crc kubenswrapper[4757]: I1006 15:00:45.864125 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 06 15:00:45 crc kubenswrapper[4757]: I1006 15:00:45.882565 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.882546067 podStartE2EDuration="37.882546067s" podCreationTimestamp="2025-10-06 15:00:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:00:45.882168525 +0000 UTC m=+4934.379487062" watchObservedRunningTime="2025-10-06 15:00:45.882546067 +0000 UTC m=+4934.379864604" Oct 06 15:01:00 crc kubenswrapper[4757]: I1006 15:01:00.081309 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:00 crc kubenswrapper[4757]: I1006 15:01:00.347319 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.304675 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b9989c9f7-4zdvt"] Oct 06 15:01:04 crc kubenswrapper[4757]: E1006 15:01:04.305544 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f866e41e-f393-40c6-9b82-c0e4eaeb4283" containerName="dnsmasq-dns" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.305567 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f866e41e-f393-40c6-9b82-c0e4eaeb4283" containerName="dnsmasq-dns" Oct 06 15:01:04 crc kubenswrapper[4757]: E1006 15:01:04.305595 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f866e41e-f393-40c6-9b82-c0e4eaeb4283" containerName="init" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.305606 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f866e41e-f393-40c6-9b82-c0e4eaeb4283" containerName="init" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 
15:01:04.305880 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f866e41e-f393-40c6-9b82-c0e4eaeb4283" containerName="dnsmasq-dns" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.309571 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.324181 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b9989c9f7-4zdvt"] Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.361407 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.361778 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.393324 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-config\") pod \"dnsmasq-dns-5b9989c9f7-4zdvt\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.393390 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cx9g\" (UniqueName: \"kubernetes.io/projected/2bd44821-3471-4f95-a1b3-9d751852ed32-kube-api-access-8cx9g\") pod \"dnsmasq-dns-5b9989c9f7-4zdvt\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.393423 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-dns-svc\") pod \"dnsmasq-dns-5b9989c9f7-4zdvt\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.494524 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-config\") pod \"dnsmasq-dns-5b9989c9f7-4zdvt\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.494585 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cx9g\" (UniqueName: \"kubernetes.io/projected/2bd44821-3471-4f95-a1b3-9d751852ed32-kube-api-access-8cx9g\") pod \"dnsmasq-dns-5b9989c9f7-4zdvt\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.494606 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-dns-svc\") pod \"dnsmasq-dns-5b9989c9f7-4zdvt\" (UID: 
\"2bd44821-3471-4f95-a1b3-9d751852ed32\") " pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.495433 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-dns-svc\") pod \"dnsmasq-dns-5b9989c9f7-4zdvt\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.495932 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-config\") pod \"dnsmasq-dns-5b9989c9f7-4zdvt\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.516841 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cx9g\" (UniqueName: \"kubernetes.io/projected/2bd44821-3471-4f95-a1b3-9d751852ed32-kube-api-access-8cx9g\") pod \"dnsmasq-dns-5b9989c9f7-4zdvt\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.637262 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:04 crc kubenswrapper[4757]: I1006 15:01:04.971912 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 15:01:05 crc kubenswrapper[4757]: I1006 15:01:05.074315 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b9989c9f7-4zdvt"] Oct 06 15:01:05 crc kubenswrapper[4757]: W1006 15:01:05.079647 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bd44821_3471_4f95_a1b3_9d751852ed32.slice/crio-53c740f60446a338aaaab1e01c7b7be28b0270438fea9a27426c9fd6cda1ff2c WatchSource:0}: Error finding container 53c740f60446a338aaaab1e01c7b7be28b0270438fea9a27426c9fd6cda1ff2c: Status 404 returned error can't find the container with id 53c740f60446a338aaaab1e01c7b7be28b0270438fea9a27426c9fd6cda1ff2c Oct 06 15:01:05 crc kubenswrapper[4757]: I1006 15:01:05.814822 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 15:01:06 crc kubenswrapper[4757]: I1006 15:01:06.034577 4757 generic.go:334] "Generic (PLEG): container finished" podID="2bd44821-3471-4f95-a1b3-9d751852ed32" containerID="eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea" exitCode=0 Oct 06 15:01:06 crc kubenswrapper[4757]: I1006 15:01:06.034621 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" event={"ID":"2bd44821-3471-4f95-a1b3-9d751852ed32","Type":"ContainerDied","Data":"eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea"} Oct 06 15:01:06 crc kubenswrapper[4757]: I1006 15:01:06.034646 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" event={"ID":"2bd44821-3471-4f95-a1b3-9d751852ed32","Type":"ContainerStarted","Data":"53c740f60446a338aaaab1e01c7b7be28b0270438fea9a27426c9fd6cda1ff2c"} Oct 06 15:01:07 crc kubenswrapper[4757]: I1006 15:01:07.041983 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" 
event={"ID":"2bd44821-3471-4f95-a1b3-9d751852ed32","Type":"ContainerStarted","Data":"e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3"} Oct 06 15:01:07 crc kubenswrapper[4757]: I1006 15:01:07.042378 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:07 crc kubenswrapper[4757]: I1006 15:01:07.058710 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" podStartSLOduration=3.058692178 podStartE2EDuration="3.058692178s" podCreationTimestamp="2025-10-06 15:01:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:01:07.05814151 +0000 UTC m=+4955.555460047" watchObservedRunningTime="2025-10-06 15:01:07.058692178 +0000 UTC m=+4955.556010715" Oct 06 15:01:09 crc kubenswrapper[4757]: I1006 15:01:09.044464 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" containerName="rabbitmq" containerID="cri-o://46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef" gracePeriod=604796 Oct 06 15:01:09 crc kubenswrapper[4757]: I1006 15:01:09.839627 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" containerName="rabbitmq" containerID="cri-o://cdbf66a122d8d0d2ccec5182d632f2b165511b0fd732f2d33a4df51e4425ac83" gracePeriod=604796 Oct 06 15:01:10 crc kubenswrapper[4757]: I1006 15:01:10.080034 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.243:5671: connect: connection refused" Oct 06 15:01:10 crc kubenswrapper[4757]: I1006 15:01:10.345756 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.244:5671: connect: connection refused" Oct 06 15:01:14 crc kubenswrapper[4757]: I1006 15:01:14.638232 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:01:14 crc kubenswrapper[4757]: I1006 15:01:14.719252 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cc4f76bbf-f8knj"] Oct 06 15:01:14 crc kubenswrapper[4757]: I1006 15:01:14.719626 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" podUID="ec6b46ec-9e74-4432-992c-e5a92a8d38e3" containerName="dnsmasq-dns" containerID="cri-o://1c3cc55e334173778a603da46bc6dfb0988e11216491656f517c5fb341c8f5af" gracePeriod=10 Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.108038 4757 generic.go:334] "Generic (PLEG): container finished" podID="ec6b46ec-9e74-4432-992c-e5a92a8d38e3" containerID="1c3cc55e334173778a603da46bc6dfb0988e11216491656f517c5fb341c8f5af" exitCode=0 Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.108145 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" event={"ID":"ec6b46ec-9e74-4432-992c-e5a92a8d38e3","Type":"ContainerDied","Data":"1c3cc55e334173778a603da46bc6dfb0988e11216491656f517c5fb341c8f5af"} Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 
15:01:15.290571 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.413411 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-config\") pod \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.413470 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh6xg\" (UniqueName: \"kubernetes.io/projected/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-kube-api-access-nh6xg\") pod \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.413496 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-dns-svc\") pod \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\" (UID: \"ec6b46ec-9e74-4432-992c-e5a92a8d38e3\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.427599 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-kube-api-access-nh6xg" (OuterVolumeSpecName: "kube-api-access-nh6xg") pod "ec6b46ec-9e74-4432-992c-e5a92a8d38e3" (UID: "ec6b46ec-9e74-4432-992c-e5a92a8d38e3"). InnerVolumeSpecName "kube-api-access-nh6xg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.457039 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-config" (OuterVolumeSpecName: "config") pod "ec6b46ec-9e74-4432-992c-e5a92a8d38e3" (UID: "ec6b46ec-9e74-4432-992c-e5a92a8d38e3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.461735 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ec6b46ec-9e74-4432-992c-e5a92a8d38e3" (UID: "ec6b46ec-9e74-4432-992c-e5a92a8d38e3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.515271 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-config\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.515302 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh6xg\" (UniqueName: \"kubernetes.io/projected/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-kube-api-access-nh6xg\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.515312 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec6b46ec-9e74-4432-992c-e5a92a8d38e3-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.531117 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.717336 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-config-data\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.717385 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-tls\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.717439 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-erlang-cookie\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.717471 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-plugins-conf\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.717491 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-pod-info\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.717517 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-plugins\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.717670 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-erlang-cookie-secret\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.717699 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mr99\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-kube-api-access-9mr99\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.718191 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.718234 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-confd\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.718266 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.718282 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.718336 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.718369 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-server-conf\") pod \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\" (UID: \"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841\") " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.718691 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.718706 4757 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.718717 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.720886 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-pod-info" (OuterVolumeSpecName: "pod-info") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.723642 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.723658 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.723721 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-kube-api-access-9mr99" (OuterVolumeSpecName: "kube-api-access-9mr99") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "kube-api-access-9mr99". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.736694 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-config-data" (OuterVolumeSpecName: "config-data") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.739013 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a" (OuterVolumeSpecName: "persistence") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.753187 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-server-conf" (OuterVolumeSpecName: "server-conf") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.787482 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" (UID: "7ee79e18-42a9-4a45-81e7-5a0ca8a6a841"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.819622 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mr99\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-kube-api-access-9mr99\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.819650 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.819681 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") on node \"crc\" " Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.819694 4757 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-server-conf\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.819743 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.819758 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.819770 4757 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-pod-info\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.819781 4757 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.844108 4757 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.844314 4757 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a") on node "crc" Oct 06 15:01:15 crc kubenswrapper[4757]: I1006 15:01:15.921617 4757 reconciler_common.go:293] "Volume detached for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.121805 4757 generic.go:334] "Generic (PLEG): container finished" podID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" containerID="cdbf66a122d8d0d2ccec5182d632f2b165511b0fd732f2d33a4df51e4425ac83" exitCode=0 Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.121850 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51","Type":"ContainerDied","Data":"cdbf66a122d8d0d2ccec5182d632f2b165511b0fd732f2d33a4df51e4425ac83"} Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.126739 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.126742 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cc4f76bbf-f8knj" event={"ID":"ec6b46ec-9e74-4432-992c-e5a92a8d38e3","Type":"ContainerDied","Data":"7960783aab4501d0e9a072c90894f43ddca613e9bfb66fbb165ac2d965806718"} Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.126844 4757 scope.go:117] "RemoveContainer" containerID="1c3cc55e334173778a603da46bc6dfb0988e11216491656f517c5fb341c8f5af" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.135404 4757 generic.go:334] "Generic (PLEG): container finished" podID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" containerID="46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef" exitCode=0 Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.135453 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841","Type":"ContainerDied","Data":"46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef"} Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.135481 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7ee79e18-42a9-4a45-81e7-5a0ca8a6a841","Type":"ContainerDied","Data":"fee9be4d2079a615d391a716f8ad1459ebdf12ee0d818dd61a60b415abec1320"} Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.135499 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.166299 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cc4f76bbf-f8knj"] Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.173569 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cc4f76bbf-f8knj"] Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.175364 4757 scope.go:117] "RemoveContainer" containerID="553995ace36a30f6b134e1b91667dc6dbff12672f1243ef6e7ee6d6c8bc14e6d" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.208360 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec6b46ec-9e74-4432-992c-e5a92a8d38e3" path="/var/lib/kubelet/pods/ec6b46ec-9e74-4432-992c-e5a92a8d38e3/volumes" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.208970 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.208998 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.223263 4757 scope.go:117] "RemoveContainer" containerID="46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.232034 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 15:01:16 crc kubenswrapper[4757]: E1006 15:01:16.232459 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6b46ec-9e74-4432-992c-e5a92a8d38e3" containerName="dnsmasq-dns" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.232482 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6b46ec-9e74-4432-992c-e5a92a8d38e3" containerName="dnsmasq-dns" Oct 06 15:01:16 crc kubenswrapper[4757]: E1006 15:01:16.232499 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" containerName="rabbitmq" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.232507 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" containerName="rabbitmq" Oct 06 15:01:16 crc kubenswrapper[4757]: E1006 15:01:16.232527 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" containerName="setup-container" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.232536 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" containerName="setup-container" Oct 06 15:01:16 crc kubenswrapper[4757]: E1006 15:01:16.232568 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec6b46ec-9e74-4432-992c-e5a92a8d38e3" containerName="init" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.232576 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec6b46ec-9e74-4432-992c-e5a92a8d38e3" containerName="init" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.232752 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" containerName="rabbitmq" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.232789 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec6b46ec-9e74-4432-992c-e5a92a8d38e3" containerName="dnsmasq-dns" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.235349 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.237937 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.239036 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-5cfkr" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.240138 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.240165 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.240230 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.240673 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.242226 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.242639 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.273286 4757 scope.go:117] "RemoveContainer" containerID="4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.302054 4757 scope.go:117] "RemoveContainer" containerID="46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef" Oct 06 15:01:16 crc kubenswrapper[4757]: E1006 15:01:16.302564 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef\": container with ID starting with 46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef not found: ID does not exist" containerID="46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.302650 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef"} err="failed to get container status \"46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef\": rpc error: code = NotFound desc = could not find container \"46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef\": container with ID starting with 46a2075657a8d970b91ec0de84e3dc09f7f24a33800fe2685701409e2a3ec0ef not found: ID does not exist" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.302726 4757 scope.go:117] "RemoveContainer" containerID="4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449" Oct 06 15:01:16 crc kubenswrapper[4757]: E1006 15:01:16.303066 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449\": container with ID starting with 4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449 not found: ID does not exist" containerID="4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.303190 4757 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449"} err="failed to get container status \"4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449\": rpc error: code = NotFound desc = could not find container \"4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449\": container with ID starting with 4514b567d73b3f73b637b17918f148b353d48d9811720eb1a8d54b5ca320d449 not found: ID does not exist" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.428984 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429032 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429088 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtk48\" (UniqueName: \"kubernetes.io/projected/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-kube-api-access-qtk48\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429177 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-config-data\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429213 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429269 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429294 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429314 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-plugins\") pod 
\"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429328 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429346 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.429486 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.485036 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.530941 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-config-data\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531021 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531066 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531141 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531175 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531207 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-server-conf\") pod 
\"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531241 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531277 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531399 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531452 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.531523 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtk48\" (UniqueName: \"kubernetes.io/projected/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-kube-api-access-qtk48\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.532026 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-config-data\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.532368 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.533925 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.536925 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.538320 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.541567 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.542148 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.546805 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.547554 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.551182 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
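
The entries above end with kubelet skipping the MountDevice (NodeStageVolume) step because the kubevirt.io.hostpath-provisioner driver does not advertise the STAGE_UNSTAGE_VOLUME node capability. A minimal Go sketch of that capability probe against the CSI spec API follows; the socket path and client wiring are illustrative assumptions, not kubelet's actual code:

    package main

    import (
    	"context"
    	"fmt"
    	"log"

    	csi "github.com/container-storage-interface/spec/lib/go/csi"
    	"google.golang.org/grpc"
    	"google.golang.org/grpc/credentials/insecure"
    )

    // supportsStageUnstage asks the node plugin for its capabilities and
    // reports whether NodeStageVolume/NodeUnstageVolume are implemented.
    func supportsStageUnstage(ctx context.Context, node csi.NodeClient) (bool, error) {
    	resp, err := node.NodeGetCapabilities(ctx, &csi.NodeGetCapabilitiesRequest{})
    	if err != nil {
    		return false, err
    	}
    	for _, c := range resp.GetCapabilities() {
    		if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
    			return true, nil
    		}
    	}
    	return false, nil
    }

    func main() {
    	// Illustrative socket path: kubelet reaches registered plugins over
    	// unix sockets under /var/lib/kubelet/plugins/.
    	conn, err := grpc.NewClient("unix:///var/lib/kubelet/plugins/csi-hostpath/csi.sock",
    		grpc.WithTransportCredentials(insecure.NewCredentials()))
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer conn.Close()

    	ok, err := supportsStageUnstage(context.Background(), csi.NewNodeClient(conn))
    	if err != nil {
    		log.Fatal(err)
    	}
    	// Prints false for a driver like the hostpath provisioner above, so
    	// kubelet logs "Skipping MountDevice" and relies on NodePublishVolume.
    	fmt.Println("STAGE_UNSTAGE_VOLUME supported:", ok)
    }

When the probe returns false, MountDevice is reported as succeeded without any NodeStageVolume call (the globalmount device path in the next entry is computed, not staged), and the actual mount happens entirely in the MountVolume.SetUp (NodePublishVolume) step.
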
Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.551232 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3cb94c976d07b4f2fb2ae0d8389c4b97a0c7ef82bc9dd694a37eee44b292797e/globalmount\"" pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.563237 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtk48\" (UniqueName: \"kubernetes.io/projected/881e264b-7f54-49d0-8e26-c7eda5e6ab5a-kube-api-access-qtk48\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.589015 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e94b737f-eafe-4f37-93ee-0568a7d5228a\") pod \"rabbitmq-server-0\" (UID: \"881e264b-7f54-49d0-8e26-c7eda5e6ab5a\") " pod="openstack/rabbitmq-server-0" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633010 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633055 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-pod-info\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633086 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-server-conf\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633153 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-erlang-cookie\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633183 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-plugins\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633211 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtwzm\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-kube-api-access-dtwzm\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: 
I1006 15:01:16.633232 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-plugins-conf\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633682 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633726 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-tls\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633756 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-confd\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633782 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-config-data\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.633828 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-erlang-cookie-secret\") pod \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\" (UID: \"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51\") " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.634148 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.634194 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.634918 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.636818 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-pod-info" (OuterVolumeSpecName: "pod-info") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.637112 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-kube-api-access-dtwzm" (OuterVolumeSpecName: "kube-api-access-dtwzm") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "kube-api-access-dtwzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.637501 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.638996 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.653773 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648" (OuterVolumeSpecName: "persistence") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.657587 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-config-data" (OuterVolumeSpecName: "config-data") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.681611 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-server-conf" (OuterVolumeSpecName: "server-conf") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.706053 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" (UID: "2d9fa0d1-9bdd-44f2-aa80-0037e7249d51"). 
InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.736316 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.736371 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.736390 4757 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.736466 4757 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") on node \"crc\" " Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.736492 4757 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-pod-info\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.737313 4757 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-server-conf\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.737333 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.737351 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtwzm\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-kube-api-access-dtwzm\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.737365 4757 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.737377 4757 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.767932 4757 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
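
The run of UnmountVolume/"Volume detached" entries above for pod 2d9fa0d1-9bdd-44f2-aa80-0037e7249d51, interleaved with MountVolume entries for its replacement, is the reconciler pattern at work: compare the desired state of the world against the actual state and issue whatever operations close the gap. A toy sketch of that loop, with simplified types and assumed names (kubelet's real volume manager tracks far more state per volume):

    package main

    import "fmt"

    // volume is a simplified stand-in; the real state of world also carries
    // the mounter, device path, SELinux context, and more.
    type volume struct{ spec string }

    // reconcile issues unmounts for volumes that are mounted but no longer
    // desired, then mounts for volumes that are desired but not yet mounted:
    // the same ordering visible above (UnmountVolume for the deleted pod's
    // volumes, MountVolume for its replacement's).
    func reconcile(desired, actual map[string]volume) {
    	for name := range actual {
    		if _, ok := desired[name]; !ok {
    			fmt.Println("operationExecutor.UnmountVolume started for", name)
    			delete(actual, name)
    		}
    	}
    	for name, v := range desired {
    		if _, ok := actual[name]; !ok {
    			fmt.Println("operationExecutor.MountVolume started for", name)
    			actual[name] = v
    		}
    	}
    }

    func main() {
    	actual := map[string]volume{"persistence": {}, "rabbitmq-confd": {}}
    	desired := map[string]volume{"rabbitmq-confd": {}, "config-data": {}}
    	reconcile(desired, actual)
    }
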
Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.768185 4757 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648") on node "crc" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.839290 4757 reconciler_common.go:293] "Volume detached for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") on node \"crc\" DevicePath \"\"" Oct 06 15:01:16 crc kubenswrapper[4757]: I1006 15:01:16.866070 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.147639 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.154752 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2d9fa0d1-9bdd-44f2-aa80-0037e7249d51","Type":"ContainerDied","Data":"d0981f7274cbfd12f39272ff0c618dc26726edef92561711c24b440b1e642cf2"} Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.154806 4757 scope.go:117] "RemoveContainer" containerID="cdbf66a122d8d0d2ccec5182d632f2b165511b0fd732f2d33a4df51e4425ac83" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.154909 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.226195 4757 scope.go:117] "RemoveContainer" containerID="a872d004b4d06df5aa1bdcccab07ae0d9ebf46963a551bddc81a482db28c1328" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.245731 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.251424 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.274935 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 15:01:17 crc kubenswrapper[4757]: E1006 15:01:17.275376 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" containerName="setup-container" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.275403 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" containerName="setup-container" Oct 06 15:01:17 crc kubenswrapper[4757]: E1006 15:01:17.275425 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" containerName="rabbitmq" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.275435 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" containerName="rabbitmq" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.275617 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" containerName="rabbitmq" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.276599 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.283511 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.283625 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.283731 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.283801 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.284030 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.284165 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.284223 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-wdvcn" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.285423 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450071 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450154 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/534874a5-63a4-4cd0-ab0d-27bea909d8a4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450177 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/534874a5-63a4-4cd0-ab0d-27bea909d8a4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450206 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/534874a5-63a4-4cd0-ab0d-27bea909d8a4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450229 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450394 4757 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450457 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450496 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/534874a5-63a4-4cd0-ab0d-27bea909d8a4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450749 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450832 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/534874a5-63a4-4cd0-ab0d-27bea909d8a4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.450880 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm8bp\" (UniqueName: \"kubernetes.io/projected/534874a5-63a4-4cd0-ab0d-27bea909d8a4-kube-api-access-nm8bp\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553053 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553177 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/534874a5-63a4-4cd0-ab0d-27bea909d8a4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553212 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/534874a5-63a4-4cd0-ab0d-27bea909d8a4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553261 4757 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/534874a5-63a4-4cd0-ab0d-27bea909d8a4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553297 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553353 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553399 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553436 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/534874a5-63a4-4cd0-ab0d-27bea909d8a4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553529 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553570 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/534874a5-63a4-4cd0-ab0d-27bea909d8a4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553608 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm8bp\" (UniqueName: \"kubernetes.io/projected/534874a5-63a4-4cd0-ab0d-27bea909d8a4-kube-api-access-nm8bp\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.553993 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.554885 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.555452 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/534874a5-63a4-4cd0-ab0d-27bea909d8a4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.556270 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/534874a5-63a4-4cd0-ab0d-27bea909d8a4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.556685 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/534874a5-63a4-4cd0-ab0d-27bea909d8a4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.560430 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.560513 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/534874a5-63a4-4cd0-ab0d-27bea909d8a4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.561612 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.561667 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/41db9b64eefef90e622c0e26edee1493aa233875f0edd02f5e522320b4849b35/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.561665 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/534874a5-63a4-4cd0-ab0d-27bea909d8a4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.562058 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/534874a5-63a4-4cd0-ab0d-27bea909d8a4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.575236 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm8bp\" (UniqueName: \"kubernetes.io/projected/534874a5-63a4-4cd0-ab0d-27bea909d8a4-kube-api-access-nm8bp\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.609582 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc005a5-9898-4ef1-98dc-4fdf4e9aa648\") pod \"rabbitmq-cell1-server-0\" (UID: \"534874a5-63a4-4cd0-ab0d-27bea909d8a4\") " pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:17 crc kubenswrapper[4757]: I1006 15:01:17.901021 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:18 crc kubenswrapper[4757]: I1006 15:01:18.164944 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"881e264b-7f54-49d0-8e26-c7eda5e6ab5a","Type":"ContainerStarted","Data":"99875303d5a2d5270e2d2be844bbcb5f738672c1e6864a09990b47b03968addd"} Oct 06 15:01:18 crc kubenswrapper[4757]: I1006 15:01:18.193699 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d9fa0d1-9bdd-44f2-aa80-0037e7249d51" path="/var/lib/kubelet/pods/2d9fa0d1-9bdd-44f2-aa80-0037e7249d51/volumes" Oct 06 15:01:18 crc kubenswrapper[4757]: I1006 15:01:18.195240 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ee79e18-42a9-4a45-81e7-5a0ca8a6a841" path="/var/lib/kubelet/pods/7ee79e18-42a9-4a45-81e7-5a0ca8a6a841/volumes" Oct 06 15:01:18 crc kubenswrapper[4757]: I1006 15:01:18.371813 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 06 15:01:18 crc kubenswrapper[4757]: W1006 15:01:18.378424 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod534874a5_63a4_4cd0_ab0d_27bea909d8a4.slice/crio-6ebeee898922ba03acfafeb942f54788c2304bb28c0c48731bc757478e259106 WatchSource:0}: Error finding container 6ebeee898922ba03acfafeb942f54788c2304bb28c0c48731bc757478e259106: Status 404 returned error can't find the container with id 6ebeee898922ba03acfafeb942f54788c2304bb28c0c48731bc757478e259106 Oct 06 15:01:19 crc kubenswrapper[4757]: I1006 15:01:19.178335 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"881e264b-7f54-49d0-8e26-c7eda5e6ab5a","Type":"ContainerStarted","Data":"51e02138e8e302cb7aa900160756c08e79fb62d4215b34f37c1e36aeaa76c13f"} Oct 06 15:01:19 crc kubenswrapper[4757]: I1006 15:01:19.182473 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"534874a5-63a4-4cd0-ab0d-27bea909d8a4","Type":"ContainerStarted","Data":"6ebeee898922ba03acfafeb942f54788c2304bb28c0c48731bc757478e259106"} Oct 06 15:01:20 crc kubenswrapper[4757]: I1006 15:01:20.200882 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"534874a5-63a4-4cd0-ab0d-27bea909d8a4","Type":"ContainerStarted","Data":"c6cbe5de8cca3600c686bd4554826f74de51353e88eb1929c72e87ab13004c9d"} Oct 06 15:01:34 crc kubenswrapper[4757]: I1006 15:01:34.361468 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 15:01:34 crc kubenswrapper[4757]: I1006 15:01:34.362170 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 15:01:34 crc kubenswrapper[4757]: I1006 15:01:34.362230 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 15:01:34 crc kubenswrapper[4757]: I1006 15:01:34.362968 4757 kuberuntime_manager.go:1027] "Message 
for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 15:01:34 crc kubenswrapper[4757]: I1006 15:01:34.363038 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" gracePeriod=600 Oct 06 15:01:34 crc kubenswrapper[4757]: E1006 15:01:34.488153 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:01:35 crc kubenswrapper[4757]: I1006 15:01:35.331980 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" exitCode=0 Oct 06 15:01:35 crc kubenswrapper[4757]: I1006 15:01:35.332025 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125"} Oct 06 15:01:35 crc kubenswrapper[4757]: I1006 15:01:35.332339 4757 scope.go:117] "RemoveContainer" containerID="bedffa6bb0c8f2dc43b319e7042746c9f10fca403f3f6e4886f8e811128a83a1" Oct 06 15:01:35 crc kubenswrapper[4757]: I1006 15:01:35.332909 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:01:35 crc kubenswrapper[4757]: E1006 15:01:35.333297 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:01:46 crc kubenswrapper[4757]: I1006 15:01:46.180291 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:01:46 crc kubenswrapper[4757]: E1006 15:01:46.181230 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:01:52 crc kubenswrapper[4757]: I1006 15:01:52.505209 4757 generic.go:334] "Generic (PLEG): container finished" podID="881e264b-7f54-49d0-8e26-c7eda5e6ab5a" 
containerID="51e02138e8e302cb7aa900160756c08e79fb62d4215b34f37c1e36aeaa76c13f" exitCode=0 Oct 06 15:01:52 crc kubenswrapper[4757]: I1006 15:01:52.505285 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"881e264b-7f54-49d0-8e26-c7eda5e6ab5a","Type":"ContainerDied","Data":"51e02138e8e302cb7aa900160756c08e79fb62d4215b34f37c1e36aeaa76c13f"} Oct 06 15:01:53 crc kubenswrapper[4757]: I1006 15:01:53.517081 4757 generic.go:334] "Generic (PLEG): container finished" podID="534874a5-63a4-4cd0-ab0d-27bea909d8a4" containerID="c6cbe5de8cca3600c686bd4554826f74de51353e88eb1929c72e87ab13004c9d" exitCode=0 Oct 06 15:01:53 crc kubenswrapper[4757]: I1006 15:01:53.517155 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"534874a5-63a4-4cd0-ab0d-27bea909d8a4","Type":"ContainerDied","Data":"c6cbe5de8cca3600c686bd4554826f74de51353e88eb1929c72e87ab13004c9d"} Oct 06 15:01:53 crc kubenswrapper[4757]: I1006 15:01:53.521650 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"881e264b-7f54-49d0-8e26-c7eda5e6ab5a","Type":"ContainerStarted","Data":"a483d6309a0f42f3944d35ad4e7d4de559440b525f76b4f7fb584808e512d7d2"} Oct 06 15:01:53 crc kubenswrapper[4757]: I1006 15:01:53.521885 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 06 15:01:53 crc kubenswrapper[4757]: I1006 15:01:53.578122 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.578082855 podStartE2EDuration="37.578082855s" podCreationTimestamp="2025-10-06 15:01:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:01:53.573142584 +0000 UTC m=+5002.070461151" watchObservedRunningTime="2025-10-06 15:01:53.578082855 +0000 UTC m=+5002.075401392" Oct 06 15:01:54 crc kubenswrapper[4757]: I1006 15:01:54.532605 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"534874a5-63a4-4cd0-ab0d-27bea909d8a4","Type":"ContainerStarted","Data":"30f1bd9e7bd1e5a3cfad91b4d403df425751b72f21f12684912cdc4007b63347"} Oct 06 15:01:54 crc kubenswrapper[4757]: I1006 15:01:54.533240 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:01:54 crc kubenswrapper[4757]: I1006 15:01:54.565803 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.565775911 podStartE2EDuration="37.565775911s" podCreationTimestamp="2025-10-06 15:01:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:01:54.557797613 +0000 UTC m=+5003.055116150" watchObservedRunningTime="2025-10-06 15:01:54.565775911 +0000 UTC m=+5003.063094448" Oct 06 15:02:00 crc kubenswrapper[4757]: I1006 15:02:00.180342 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:02:00 crc kubenswrapper[4757]: E1006 15:02:00.181197 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:02:06 crc kubenswrapper[4757]: I1006 15:02:06.869314 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 06 15:02:07 crc kubenswrapper[4757]: I1006 15:02:07.906413 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 06 15:02:09 crc kubenswrapper[4757]: I1006 15:02:09.474772 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Oct 06 15:02:09 crc kubenswrapper[4757]: I1006 15:02:09.476356 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 06 15:02:09 crc kubenswrapper[4757]: I1006 15:02:09.479023 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-s55mr" Oct 06 15:02:09 crc kubenswrapper[4757]: I1006 15:02:09.491505 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 06 15:02:09 crc kubenswrapper[4757]: I1006 15:02:09.624320 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbh8c\" (UniqueName: \"kubernetes.io/projected/ee370af0-17c3-4469-97dc-4b7ec3ad82d8-kube-api-access-lbh8c\") pod \"mariadb-client-1-default\" (UID: \"ee370af0-17c3-4469-97dc-4b7ec3ad82d8\") " pod="openstack/mariadb-client-1-default" Oct 06 15:02:09 crc kubenswrapper[4757]: I1006 15:02:09.726485 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbh8c\" (UniqueName: \"kubernetes.io/projected/ee370af0-17c3-4469-97dc-4b7ec3ad82d8-kube-api-access-lbh8c\") pod \"mariadb-client-1-default\" (UID: \"ee370af0-17c3-4469-97dc-4b7ec3ad82d8\") " pod="openstack/mariadb-client-1-default" Oct 06 15:02:09 crc kubenswrapper[4757]: I1006 15:02:09.759870 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbh8c\" (UniqueName: \"kubernetes.io/projected/ee370af0-17c3-4469-97dc-4b7ec3ad82d8-kube-api-access-lbh8c\") pod \"mariadb-client-1-default\" (UID: \"ee370af0-17c3-4469-97dc-4b7ec3ad82d8\") " pod="openstack/mariadb-client-1-default" Oct 06 15:02:09 crc kubenswrapper[4757]: I1006 15:02:09.807836 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 06 15:02:10 crc kubenswrapper[4757]: I1006 15:02:10.166316 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 06 15:02:10 crc kubenswrapper[4757]: I1006 15:02:10.666911 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"ee370af0-17c3-4469-97dc-4b7ec3ad82d8","Type":"ContainerStarted","Data":"4cddabc84b73577ce4babe8d29821ef8ab94ec2ecb57200513e64d8afb3bb79d"} Oct 06 15:02:13 crc kubenswrapper[4757]: I1006 15:02:13.689627 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"ee370af0-17c3-4469-97dc-4b7ec3ad82d8","Type":"ContainerStarted","Data":"fc3409671ee94fbb429d086b1463d5bf82f241d4bb508d5da676e1097f2ee1ef"} Oct 06 15:02:13 crc kubenswrapper[4757]: I1006 15:02:13.705180 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-1-default" podStartSLOduration=1.411356683 podStartE2EDuration="4.70516041s" podCreationTimestamp="2025-10-06 15:02:09 +0000 UTC" firstStartedPulling="2025-10-06 15:02:10.167801496 +0000 UTC m=+5018.665120033" lastFinishedPulling="2025-10-06 15:02:13.461605223 +0000 UTC m=+5021.958923760" observedRunningTime="2025-10-06 15:02:13.700975596 +0000 UTC m=+5022.198294143" watchObservedRunningTime="2025-10-06 15:02:13.70516041 +0000 UTC m=+5022.202478967" Oct 06 15:02:13 crc kubenswrapper[4757]: I1006 15:02:13.755954 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_ee370af0-17c3-4469-97dc-4b7ec3ad82d8/mariadb-client-1-default/0.log" Oct 06 15:02:14 crc kubenswrapper[4757]: I1006 15:02:14.179736 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:02:14 crc kubenswrapper[4757]: E1006 15:02:14.179990 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:02:14 crc kubenswrapper[4757]: I1006 15:02:14.701751 4757 generic.go:334] "Generic (PLEG): container finished" podID="ee370af0-17c3-4469-97dc-4b7ec3ad82d8" containerID="fc3409671ee94fbb429d086b1463d5bf82f241d4bb508d5da676e1097f2ee1ef" exitCode=0 Oct 06 15:02:14 crc kubenswrapper[4757]: I1006 15:02:14.701821 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"ee370af0-17c3-4469-97dc-4b7ec3ad82d8","Type":"ContainerDied","Data":"fc3409671ee94fbb429d086b1463d5bf82f241d4bb508d5da676e1097f2ee1ef"} Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.132549 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.169410 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.174844 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.237332 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbh8c\" (UniqueName: \"kubernetes.io/projected/ee370af0-17c3-4469-97dc-4b7ec3ad82d8-kube-api-access-lbh8c\") pod \"ee370af0-17c3-4469-97dc-4b7ec3ad82d8\" (UID: \"ee370af0-17c3-4469-97dc-4b7ec3ad82d8\") " Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.244659 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee370af0-17c3-4469-97dc-4b7ec3ad82d8-kube-api-access-lbh8c" (OuterVolumeSpecName: "kube-api-access-lbh8c") pod "ee370af0-17c3-4469-97dc-4b7ec3ad82d8" (UID: "ee370af0-17c3-4469-97dc-4b7ec3ad82d8"). InnerVolumeSpecName "kube-api-access-lbh8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.339822 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbh8c\" (UniqueName: \"kubernetes.io/projected/ee370af0-17c3-4469-97dc-4b7ec3ad82d8-kube-api-access-lbh8c\") on node \"crc\" DevicePath \"\"" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.654242 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Oct 06 15:02:16 crc kubenswrapper[4757]: E1006 15:02:16.654918 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee370af0-17c3-4469-97dc-4b7ec3ad82d8" containerName="mariadb-client-1-default" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.654934 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee370af0-17c3-4469-97dc-4b7ec3ad82d8" containerName="mariadb-client-1-default" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.655072 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee370af0-17c3-4469-97dc-4b7ec3ad82d8" containerName="mariadb-client-1-default" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.655713 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.661728 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.724058 4757 scope.go:117] "RemoveContainer" containerID="fc3409671ee94fbb429d086b1463d5bf82f241d4bb508d5da676e1097f2ee1ef" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.724213 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.747997 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxllm\" (UniqueName: \"kubernetes.io/projected/d6d18bff-0421-4fe2-812a-73c9660fc78b-kube-api-access-nxllm\") pod \"mariadb-client-2-default\" (UID: \"d6d18bff-0421-4fe2-812a-73c9660fc78b\") " pod="openstack/mariadb-client-2-default" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.849452 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxllm\" (UniqueName: \"kubernetes.io/projected/d6d18bff-0421-4fe2-812a-73c9660fc78b-kube-api-access-nxllm\") pod \"mariadb-client-2-default\" (UID: \"d6d18bff-0421-4fe2-812a-73c9660fc78b\") " pod="openstack/mariadb-client-2-default" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.867351 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxllm\" (UniqueName: \"kubernetes.io/projected/d6d18bff-0421-4fe2-812a-73c9660fc78b-kube-api-access-nxllm\") pod \"mariadb-client-2-default\" (UID: \"d6d18bff-0421-4fe2-812a-73c9660fc78b\") " pod="openstack/mariadb-client-2-default" Oct 06 15:02:16 crc kubenswrapper[4757]: I1006 15:02:16.972607 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 06 15:02:17 crc kubenswrapper[4757]: I1006 15:02:17.476700 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 06 15:02:17 crc kubenswrapper[4757]: W1006 15:02:17.481032 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6d18bff_0421_4fe2_812a_73c9660fc78b.slice/crio-b7c6a5cf974ec55db5785ae821b060a9a43bdfde57131b4877d1868d2a19d2b7 WatchSource:0}: Error finding container b7c6a5cf974ec55db5785ae821b060a9a43bdfde57131b4877d1868d2a19d2b7: Status 404 returned error can't find the container with id b7c6a5cf974ec55db5785ae821b060a9a43bdfde57131b4877d1868d2a19d2b7 Oct 06 15:02:17 crc kubenswrapper[4757]: I1006 15:02:17.732042 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"d6d18bff-0421-4fe2-812a-73c9660fc78b","Type":"ContainerStarted","Data":"e03af2951b5d3267e8919e1b19c3f6655b6e0e4a06a8d2747494291ddd6dae20"} Oct 06 15:02:17 crc kubenswrapper[4757]: I1006 15:02:17.732111 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"d6d18bff-0421-4fe2-812a-73c9660fc78b","Type":"ContainerStarted","Data":"b7c6a5cf974ec55db5785ae821b060a9a43bdfde57131b4877d1868d2a19d2b7"} Oct 06 15:02:17 crc kubenswrapper[4757]: I1006 15:02:17.756560 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-2-default" podStartSLOduration=1.75651002 podStartE2EDuration="1.75651002s" podCreationTimestamp="2025-10-06 15:02:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:02:17.751009752 +0000 UTC m=+5026.248328309" watchObservedRunningTime="2025-10-06 15:02:17.75651002 +0000 UTC m=+5026.253828577" Oct 06 15:02:18 crc kubenswrapper[4757]: I1006 15:02:18.190683 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee370af0-17c3-4469-97dc-4b7ec3ad82d8" 
path="/var/lib/kubelet/pods/ee370af0-17c3-4469-97dc-4b7ec3ad82d8/volumes" Oct 06 15:02:18 crc kubenswrapper[4757]: I1006 15:02:18.746759 4757 generic.go:334] "Generic (PLEG): container finished" podID="d6d18bff-0421-4fe2-812a-73c9660fc78b" containerID="e03af2951b5d3267e8919e1b19c3f6655b6e0e4a06a8d2747494291ddd6dae20" exitCode=0 Oct 06 15:02:18 crc kubenswrapper[4757]: I1006 15:02:18.746872 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"d6d18bff-0421-4fe2-812a-73c9660fc78b","Type":"ContainerDied","Data":"e03af2951b5d3267e8919e1b19c3f6655b6e0e4a06a8d2747494291ddd6dae20"} Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.210337 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.248231 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.253212 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.306631 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxllm\" (UniqueName: \"kubernetes.io/projected/d6d18bff-0421-4fe2-812a-73c9660fc78b-kube-api-access-nxllm\") pod \"d6d18bff-0421-4fe2-812a-73c9660fc78b\" (UID: \"d6d18bff-0421-4fe2-812a-73c9660fc78b\") " Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.312039 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6d18bff-0421-4fe2-812a-73c9660fc78b-kube-api-access-nxllm" (OuterVolumeSpecName: "kube-api-access-nxllm") pod "d6d18bff-0421-4fe2-812a-73c9660fc78b" (UID: "d6d18bff-0421-4fe2-812a-73c9660fc78b"). InnerVolumeSpecName "kube-api-access-nxllm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.409778 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxllm\" (UniqueName: \"kubernetes.io/projected/d6d18bff-0421-4fe2-812a-73c9660fc78b-kube-api-access-nxllm\") on node \"crc\" DevicePath \"\"" Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.739497 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Oct 06 15:02:20 crc kubenswrapper[4757]: E1006 15:02:20.739865 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6d18bff-0421-4fe2-812a-73c9660fc78b" containerName="mariadb-client-2-default" Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.739882 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6d18bff-0421-4fe2-812a-73c9660fc78b" containerName="mariadb-client-2-default" Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.740029 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6d18bff-0421-4fe2-812a-73c9660fc78b" containerName="mariadb-client-2-default" Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.740684 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.749238 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.772948 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7c6a5cf974ec55db5785ae821b060a9a43bdfde57131b4877d1868d2a19d2b7" Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.773002 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Oct 06 15:02:20 crc kubenswrapper[4757]: E1006 15:02:20.906624 4757 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6d18bff_0421_4fe2_812a_73c9660fc78b.slice\": RecentStats: unable to find data in memory cache]" Oct 06 15:02:20 crc kubenswrapper[4757]: I1006 15:02:20.915618 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swzbn\" (UniqueName: \"kubernetes.io/projected/35e41d71-1aaf-43c4-b5e8-daed88d05c69-kube-api-access-swzbn\") pod \"mariadb-client-1\" (UID: \"35e41d71-1aaf-43c4-b5e8-daed88d05c69\") " pod="openstack/mariadb-client-1" Oct 06 15:02:21 crc kubenswrapper[4757]: I1006 15:02:21.018225 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swzbn\" (UniqueName: \"kubernetes.io/projected/35e41d71-1aaf-43c4-b5e8-daed88d05c69-kube-api-access-swzbn\") pod \"mariadb-client-1\" (UID: \"35e41d71-1aaf-43c4-b5e8-daed88d05c69\") " pod="openstack/mariadb-client-1" Oct 06 15:02:21 crc kubenswrapper[4757]: I1006 15:02:21.051556 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swzbn\" (UniqueName: \"kubernetes.io/projected/35e41d71-1aaf-43c4-b5e8-daed88d05c69-kube-api-access-swzbn\") pod \"mariadb-client-1\" (UID: \"35e41d71-1aaf-43c4-b5e8-daed88d05c69\") " pod="openstack/mariadb-client-1" Oct 06 15:02:21 crc kubenswrapper[4757]: I1006 15:02:21.068311 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Oct 06 15:02:21 crc kubenswrapper[4757]: I1006 15:02:21.595264 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Oct 06 15:02:21 crc kubenswrapper[4757]: I1006 15:02:21.791722 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"35e41d71-1aaf-43c4-b5e8-daed88d05c69","Type":"ContainerStarted","Data":"1a002e77fc091bcc362547539913f92018bd485e1fba260cce44d68254230494"} Oct 06 15:02:21 crc kubenswrapper[4757]: I1006 15:02:21.820555 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-1" podStartSLOduration=1.820525161 podStartE2EDuration="1.820525161s" podCreationTimestamp="2025-10-06 15:02:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:02:21.812299445 +0000 UTC m=+5030.309617982" watchObservedRunningTime="2025-10-06 15:02:21.820525161 +0000 UTC m=+5030.317843748" Oct 06 15:02:21 crc kubenswrapper[4757]: I1006 15:02:21.856178 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_35e41d71-1aaf-43c4-b5e8-daed88d05c69/mariadb-client-1/0.log" Oct 06 15:02:22 crc kubenswrapper[4757]: I1006 15:02:22.192434 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6d18bff-0421-4fe2-812a-73c9660fc78b" path="/var/lib/kubelet/pods/d6d18bff-0421-4fe2-812a-73c9660fc78b/volumes" Oct 06 15:02:22 crc kubenswrapper[4757]: I1006 15:02:22.803144 4757 generic.go:334] "Generic (PLEG): container finished" podID="35e41d71-1aaf-43c4-b5e8-daed88d05c69" containerID="8535c0c0769771e13a076bdf53d5cebf644897ca0c68b80d13fddfaaadba5f59" exitCode=0 Oct 06 15:02:22 crc kubenswrapper[4757]: I1006 15:02:22.803208 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"35e41d71-1aaf-43c4-b5e8-daed88d05c69","Type":"ContainerDied","Data":"8535c0c0769771e13a076bdf53d5cebf644897ca0c68b80d13fddfaaadba5f59"} Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.197385 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.236822 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.241441 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.373962 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swzbn\" (UniqueName: \"kubernetes.io/projected/35e41d71-1aaf-43c4-b5e8-daed88d05c69-kube-api-access-swzbn\") pod \"35e41d71-1aaf-43c4-b5e8-daed88d05c69\" (UID: \"35e41d71-1aaf-43c4-b5e8-daed88d05c69\") " Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.385589 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35e41d71-1aaf-43c4-b5e8-daed88d05c69-kube-api-access-swzbn" (OuterVolumeSpecName: "kube-api-access-swzbn") pod "35e41d71-1aaf-43c4-b5e8-daed88d05c69" (UID: "35e41d71-1aaf-43c4-b5e8-daed88d05c69"). InnerVolumeSpecName "kube-api-access-swzbn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.476808 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swzbn\" (UniqueName: \"kubernetes.io/projected/35e41d71-1aaf-43c4-b5e8-daed88d05c69-kube-api-access-swzbn\") on node \"crc\" DevicePath \"\"" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.721667 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Oct 06 15:02:24 crc kubenswrapper[4757]: E1006 15:02:24.722241 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35e41d71-1aaf-43c4-b5e8-daed88d05c69" containerName="mariadb-client-1" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.722263 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="35e41d71-1aaf-43c4-b5e8-daed88d05c69" containerName="mariadb-client-1" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.722477 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="35e41d71-1aaf-43c4-b5e8-daed88d05c69" containerName="mariadb-client-1" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.723197 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.735852 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.823255 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a002e77fc091bcc362547539913f92018bd485e1fba260cce44d68254230494" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.823501 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.882428 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf65l\" (UniqueName: \"kubernetes.io/projected/e9b36966-b52e-434a-9ddd-52f1e572f0f3-kube-api-access-mf65l\") pod \"mariadb-client-4-default\" (UID: \"e9b36966-b52e-434a-9ddd-52f1e572f0f3\") " pod="openstack/mariadb-client-4-default" Oct 06 15:02:24 crc kubenswrapper[4757]: I1006 15:02:24.983661 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf65l\" (UniqueName: \"kubernetes.io/projected/e9b36966-b52e-434a-9ddd-52f1e572f0f3-kube-api-access-mf65l\") pod \"mariadb-client-4-default\" (UID: \"e9b36966-b52e-434a-9ddd-52f1e572f0f3\") " pod="openstack/mariadb-client-4-default" Oct 06 15:02:25 crc kubenswrapper[4757]: I1006 15:02:25.009249 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf65l\" (UniqueName: \"kubernetes.io/projected/e9b36966-b52e-434a-9ddd-52f1e572f0f3-kube-api-access-mf65l\") pod \"mariadb-client-4-default\" (UID: \"e9b36966-b52e-434a-9ddd-52f1e572f0f3\") " pod="openstack/mariadb-client-4-default" Oct 06 15:02:25 crc kubenswrapper[4757]: I1006 15:02:25.039136 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 06 15:02:25 crc kubenswrapper[4757]: I1006 15:02:25.180323 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:02:25 crc kubenswrapper[4757]: E1006 15:02:25.180979 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:02:25 crc kubenswrapper[4757]: I1006 15:02:25.557538 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 06 15:02:25 crc kubenswrapper[4757]: I1006 15:02:25.830916 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"e9b36966-b52e-434a-9ddd-52f1e572f0f3","Type":"ContainerStarted","Data":"ba9ce92cb45ec8f44f1e476da814f36331c853af818f8766259f4442550bad65"} Oct 06 15:02:26 crc kubenswrapper[4757]: I1006 15:02:26.190023 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35e41d71-1aaf-43c4-b5e8-daed88d05c69" path="/var/lib/kubelet/pods/35e41d71-1aaf-43c4-b5e8-daed88d05c69/volumes" Oct 06 15:02:26 crc kubenswrapper[4757]: I1006 15:02:26.840211 4757 generic.go:334] "Generic (PLEG): container finished" podID="e9b36966-b52e-434a-9ddd-52f1e572f0f3" containerID="15397b94b7d4fe5cef23a6f0aea4c769777e5955f7293e4fb752f3df064dc80b" exitCode=0 Oct 06 15:02:26 crc kubenswrapper[4757]: I1006 15:02:26.840288 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"e9b36966-b52e-434a-9ddd-52f1e572f0f3","Type":"ContainerDied","Data":"15397b94b7d4fe5cef23a6f0aea4c769777e5955f7293e4fb752f3df064dc80b"} Oct 06 15:02:28 crc kubenswrapper[4757]: I1006 15:02:28.227608 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 06 15:02:28 crc kubenswrapper[4757]: I1006 15:02:28.243371 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_e9b36966-b52e-434a-9ddd-52f1e572f0f3/mariadb-client-4-default/0.log" Oct 06 15:02:28 crc kubenswrapper[4757]: I1006 15:02:28.265749 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 06 15:02:28 crc kubenswrapper[4757]: I1006 15:02:28.272938 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Oct 06 15:02:28 crc kubenswrapper[4757]: I1006 15:02:28.343242 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf65l\" (UniqueName: \"kubernetes.io/projected/e9b36966-b52e-434a-9ddd-52f1e572f0f3-kube-api-access-mf65l\") pod \"e9b36966-b52e-434a-9ddd-52f1e572f0f3\" (UID: \"e9b36966-b52e-434a-9ddd-52f1e572f0f3\") " Oct 06 15:02:28 crc kubenswrapper[4757]: I1006 15:02:28.348736 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9b36966-b52e-434a-9ddd-52f1e572f0f3-kube-api-access-mf65l" (OuterVolumeSpecName: "kube-api-access-mf65l") pod "e9b36966-b52e-434a-9ddd-52f1e572f0f3" (UID: "e9b36966-b52e-434a-9ddd-52f1e572f0f3"). InnerVolumeSpecName "kube-api-access-mf65l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:02:28 crc kubenswrapper[4757]: I1006 15:02:28.445208 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf65l\" (UniqueName: \"kubernetes.io/projected/e9b36966-b52e-434a-9ddd-52f1e572f0f3-kube-api-access-mf65l\") on node \"crc\" DevicePath \"\"" Oct 06 15:02:28 crc kubenswrapper[4757]: I1006 15:02:28.856496 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba9ce92cb45ec8f44f1e476da814f36331c853af818f8766259f4442550bad65" Oct 06 15:02:28 crc kubenswrapper[4757]: I1006 15:02:28.856595 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Oct 06 15:02:30 crc kubenswrapper[4757]: I1006 15:02:30.194558 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9b36966-b52e-434a-9ddd-52f1e572f0f3" path="/var/lib/kubelet/pods/e9b36966-b52e-434a-9ddd-52f1e572f0f3/volumes" Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.202829 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Oct 06 15:02:32 crc kubenswrapper[4757]: E1006 15:02:32.203787 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9b36966-b52e-434a-9ddd-52f1e572f0f3" containerName="mariadb-client-4-default" Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.203807 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9b36966-b52e-434a-9ddd-52f1e572f0f3" containerName="mariadb-client-4-default" Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.203990 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9b36966-b52e-434a-9ddd-52f1e572f0f3" containerName="mariadb-client-4-default" Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.204633 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.208691 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-s55mr" Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.215705 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.303378 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l79h2\" (UniqueName: \"kubernetes.io/projected/6eedc392-c9dc-4888-aa9d-ba1a418151be-kube-api-access-l79h2\") pod \"mariadb-client-5-default\" (UID: \"6eedc392-c9dc-4888-aa9d-ba1a418151be\") " pod="openstack/mariadb-client-5-default" Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.405922 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l79h2\" (UniqueName: \"kubernetes.io/projected/6eedc392-c9dc-4888-aa9d-ba1a418151be-kube-api-access-l79h2\") pod \"mariadb-client-5-default\" (UID: \"6eedc392-c9dc-4888-aa9d-ba1a418151be\") " pod="openstack/mariadb-client-5-default" Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.432872 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l79h2\" (UniqueName: \"kubernetes.io/projected/6eedc392-c9dc-4888-aa9d-ba1a418151be-kube-api-access-l79h2\") pod \"mariadb-client-5-default\" (UID: \"6eedc392-c9dc-4888-aa9d-ba1a418151be\") " pod="openstack/mariadb-client-5-default" Oct 06 15:02:32 crc kubenswrapper[4757]: I1006 15:02:32.529701 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 06 15:02:33 crc kubenswrapper[4757]: I1006 15:02:33.124035 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 06 15:02:33 crc kubenswrapper[4757]: I1006 15:02:33.898888 4757 generic.go:334] "Generic (PLEG): container finished" podID="6eedc392-c9dc-4888-aa9d-ba1a418151be" containerID="afed9789a9fa5bc388ef68470a9f84147b47e6c3cd95975e9e9cdf606e6fbb7f" exitCode=0 Oct 06 15:02:33 crc kubenswrapper[4757]: I1006 15:02:33.898982 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"6eedc392-c9dc-4888-aa9d-ba1a418151be","Type":"ContainerDied","Data":"afed9789a9fa5bc388ef68470a9f84147b47e6c3cd95975e9e9cdf606e6fbb7f"} Oct 06 15:02:33 crc kubenswrapper[4757]: I1006 15:02:33.899196 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"6eedc392-c9dc-4888-aa9d-ba1a418151be","Type":"ContainerStarted","Data":"5aeaef636694ec48ecfa6b7aa8a732bec29b0182d623e9ac40941162c8d65e63"} Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.323195 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.340747 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_6eedc392-c9dc-4888-aa9d-ba1a418151be/mariadb-client-5-default/0.log" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.364308 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.371234 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.467836 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l79h2\" (UniqueName: \"kubernetes.io/projected/6eedc392-c9dc-4888-aa9d-ba1a418151be-kube-api-access-l79h2\") pod \"6eedc392-c9dc-4888-aa9d-ba1a418151be\" (UID: \"6eedc392-c9dc-4888-aa9d-ba1a418151be\") " Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.475379 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6eedc392-c9dc-4888-aa9d-ba1a418151be-kube-api-access-l79h2" (OuterVolumeSpecName: "kube-api-access-l79h2") pod "6eedc392-c9dc-4888-aa9d-ba1a418151be" (UID: "6eedc392-c9dc-4888-aa9d-ba1a418151be"). InnerVolumeSpecName "kube-api-access-l79h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.517693 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Oct 06 15:02:35 crc kubenswrapper[4757]: E1006 15:02:35.518055 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6eedc392-c9dc-4888-aa9d-ba1a418151be" containerName="mariadb-client-5-default" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.518072 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="6eedc392-c9dc-4888-aa9d-ba1a418151be" containerName="mariadb-client-5-default" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.518602 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="6eedc392-c9dc-4888-aa9d-ba1a418151be" containerName="mariadb-client-5-default" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.534634 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.546485 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.570066 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l79h2\" (UniqueName: \"kubernetes.io/projected/6eedc392-c9dc-4888-aa9d-ba1a418151be-kube-api-access-l79h2\") on node \"crc\" DevicePath \"\"" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.671822 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmfmd\" (UniqueName: \"kubernetes.io/projected/569813cc-9eec-49ab-9c13-12dd86b3a3f6-kube-api-access-pmfmd\") pod \"mariadb-client-6-default\" (UID: \"569813cc-9eec-49ab-9c13-12dd86b3a3f6\") " pod="openstack/mariadb-client-6-default" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.773253 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmfmd\" (UniqueName: \"kubernetes.io/projected/569813cc-9eec-49ab-9c13-12dd86b3a3f6-kube-api-access-pmfmd\") pod \"mariadb-client-6-default\" (UID: \"569813cc-9eec-49ab-9c13-12dd86b3a3f6\") " pod="openstack/mariadb-client-6-default" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.791356 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmfmd\" (UniqueName: \"kubernetes.io/projected/569813cc-9eec-49ab-9c13-12dd86b3a3f6-kube-api-access-pmfmd\") pod \"mariadb-client-6-default\" (UID: \"569813cc-9eec-49ab-9c13-12dd86b3a3f6\") " pod="openstack/mariadb-client-6-default" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.859870 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.915576 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5aeaef636694ec48ecfa6b7aa8a732bec29b0182d623e9ac40941162c8d65e63" Oct 06 15:02:35 crc kubenswrapper[4757]: I1006 15:02:35.915643 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Oct 06 15:02:36 crc kubenswrapper[4757]: I1006 15:02:36.196326 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6eedc392-c9dc-4888-aa9d-ba1a418151be" path="/var/lib/kubelet/pods/6eedc392-c9dc-4888-aa9d-ba1a418151be/volumes" Oct 06 15:02:36 crc kubenswrapper[4757]: I1006 15:02:36.367775 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 06 15:02:36 crc kubenswrapper[4757]: W1006 15:02:36.372725 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod569813cc_9eec_49ab_9c13_12dd86b3a3f6.slice/crio-fa78469517d40c50aa0d1ef0ba81ca70f6d4cbd9084b269b54b067cff125d145 WatchSource:0}: Error finding container fa78469517d40c50aa0d1ef0ba81ca70f6d4cbd9084b269b54b067cff125d145: Status 404 returned error can't find the container with id fa78469517d40c50aa0d1ef0ba81ca70f6d4cbd9084b269b54b067cff125d145 Oct 06 15:02:36 crc kubenswrapper[4757]: I1006 15:02:36.923202 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"569813cc-9eec-49ab-9c13-12dd86b3a3f6","Type":"ContainerStarted","Data":"a5ff5f6d3972e9dee3bef20d2bb5b1ba46435b649139b30fe2f13f9799da229b"} Oct 06 15:02:36 crc kubenswrapper[4757]: I1006 15:02:36.923740 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"569813cc-9eec-49ab-9c13-12dd86b3a3f6","Type":"ContainerStarted","Data":"fa78469517d40c50aa0d1ef0ba81ca70f6d4cbd9084b269b54b067cff125d145"} Oct 06 15:02:36 crc kubenswrapper[4757]: I1006 15:02:36.954395 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=1.954363693 podStartE2EDuration="1.954363693s" podCreationTimestamp="2025-10-06 15:02:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:02:36.937819338 +0000 UTC m=+5045.435137885" watchObservedRunningTime="2025-10-06 15:02:36.954363693 +0000 UTC m=+5045.451682270" Oct 06 15:02:37 crc kubenswrapper[4757]: I1006 15:02:37.019802 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_569813cc-9eec-49ab-9c13-12dd86b3a3f6/mariadb-client-6-default/0.log" Oct 06 15:02:37 crc kubenswrapper[4757]: I1006 15:02:37.180363 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:02:37 crc kubenswrapper[4757]: E1006 15:02:37.180645 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:02:37 crc kubenswrapper[4757]: I1006 15:02:37.932373 4757 generic.go:334] "Generic (PLEG): container finished" podID="569813cc-9eec-49ab-9c13-12dd86b3a3f6" containerID="a5ff5f6d3972e9dee3bef20d2bb5b1ba46435b649139b30fe2f13f9799da229b" exitCode=0 Oct 06 15:02:37 crc kubenswrapper[4757]: I1006 15:02:37.932426 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" 
event={"ID":"569813cc-9eec-49ab-9c13-12dd86b3a3f6","Type":"ContainerDied","Data":"a5ff5f6d3972e9dee3bef20d2bb5b1ba46435b649139b30fe2f13f9799da229b"} Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.330162 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.372496 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.379069 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.435003 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmfmd\" (UniqueName: \"kubernetes.io/projected/569813cc-9eec-49ab-9c13-12dd86b3a3f6-kube-api-access-pmfmd\") pod \"569813cc-9eec-49ab-9c13-12dd86b3a3f6\" (UID: \"569813cc-9eec-49ab-9c13-12dd86b3a3f6\") " Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.440608 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/569813cc-9eec-49ab-9c13-12dd86b3a3f6-kube-api-access-pmfmd" (OuterVolumeSpecName: "kube-api-access-pmfmd") pod "569813cc-9eec-49ab-9c13-12dd86b3a3f6" (UID: "569813cc-9eec-49ab-9c13-12dd86b3a3f6"). InnerVolumeSpecName "kube-api-access-pmfmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.513844 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Oct 06 15:02:39 crc kubenswrapper[4757]: E1006 15:02:39.514496 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="569813cc-9eec-49ab-9c13-12dd86b3a3f6" containerName="mariadb-client-6-default" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.514539 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="569813cc-9eec-49ab-9c13-12dd86b3a3f6" containerName="mariadb-client-6-default" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.514943 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="569813cc-9eec-49ab-9c13-12dd86b3a3f6" containerName="mariadb-client-6-default" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.515894 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.519353 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.536311 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmfmd\" (UniqueName: \"kubernetes.io/projected/569813cc-9eec-49ab-9c13-12dd86b3a3f6-kube-api-access-pmfmd\") on node \"crc\" DevicePath \"\"" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.638129 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgtxn\" (UniqueName: \"kubernetes.io/projected/16f29920-78e8-4894-b06f-dc591bf63115-kube-api-access-tgtxn\") pod \"mariadb-client-7-default\" (UID: \"16f29920-78e8-4894-b06f-dc591bf63115\") " pod="openstack/mariadb-client-7-default" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.739794 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgtxn\" (UniqueName: \"kubernetes.io/projected/16f29920-78e8-4894-b06f-dc591bf63115-kube-api-access-tgtxn\") pod \"mariadb-client-7-default\" (UID: \"16f29920-78e8-4894-b06f-dc591bf63115\") " pod="openstack/mariadb-client-7-default" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.764897 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgtxn\" (UniqueName: \"kubernetes.io/projected/16f29920-78e8-4894-b06f-dc591bf63115-kube-api-access-tgtxn\") pod \"mariadb-client-7-default\" (UID: \"16f29920-78e8-4894-b06f-dc591bf63115\") " pod="openstack/mariadb-client-7-default" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.836911 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.953916 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa78469517d40c50aa0d1ef0ba81ca70f6d4cbd9084b269b54b067cff125d145" Oct 06 15:02:39 crc kubenswrapper[4757]: I1006 15:02:39.953992 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Oct 06 15:02:40 crc kubenswrapper[4757]: I1006 15:02:40.195234 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="569813cc-9eec-49ab-9c13-12dd86b3a3f6" path="/var/lib/kubelet/pods/569813cc-9eec-49ab-9c13-12dd86b3a3f6/volumes" Oct 06 15:02:40 crc kubenswrapper[4757]: I1006 15:02:40.353147 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 06 15:02:40 crc kubenswrapper[4757]: I1006 15:02:40.965299 4757 generic.go:334] "Generic (PLEG): container finished" podID="16f29920-78e8-4894-b06f-dc591bf63115" containerID="37654d3d038afc23b92fe3a958f582e331756a1f5668ac5128cb8df7a7999dfc" exitCode=0 Oct 06 15:02:40 crc kubenswrapper[4757]: I1006 15:02:40.965359 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"16f29920-78e8-4894-b06f-dc591bf63115","Type":"ContainerDied","Data":"37654d3d038afc23b92fe3a958f582e331756a1f5668ac5128cb8df7a7999dfc"} Oct 06 15:02:40 crc kubenswrapper[4757]: I1006 15:02:40.965387 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"16f29920-78e8-4894-b06f-dc591bf63115","Type":"ContainerStarted","Data":"2e27f71d3278d90ce7eb769aea0ce8e27ff9bb4c38b2182bea8cdfb8d8a0046c"} Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.440539 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.458220 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_16f29920-78e8-4894-b06f-dc591bf63115/mariadb-client-7-default/0.log" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.482191 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.487406 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.582771 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgtxn\" (UniqueName: \"kubernetes.io/projected/16f29920-78e8-4894-b06f-dc591bf63115-kube-api-access-tgtxn\") pod \"16f29920-78e8-4894-b06f-dc591bf63115\" (UID: \"16f29920-78e8-4894-b06f-dc591bf63115\") " Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.587486 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16f29920-78e8-4894-b06f-dc591bf63115-kube-api-access-tgtxn" (OuterVolumeSpecName: "kube-api-access-tgtxn") pod "16f29920-78e8-4894-b06f-dc591bf63115" (UID: "16f29920-78e8-4894-b06f-dc591bf63115"). InnerVolumeSpecName "kube-api-access-tgtxn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.618433 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Oct 06 15:02:42 crc kubenswrapper[4757]: E1006 15:02:42.618833 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16f29920-78e8-4894-b06f-dc591bf63115" containerName="mariadb-client-7-default" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.618858 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="16f29920-78e8-4894-b06f-dc591bf63115" containerName="mariadb-client-7-default" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.619066 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="16f29920-78e8-4894-b06f-dc591bf63115" containerName="mariadb-client-7-default" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.619911 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.632938 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.684068 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgtxn\" (UniqueName: \"kubernetes.io/projected/16f29920-78e8-4894-b06f-dc591bf63115-kube-api-access-tgtxn\") on node \"crc\" DevicePath \"\"" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.785689 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwwbt\" (UniqueName: \"kubernetes.io/projected/27dff4ad-45c2-4d58-b0de-8736a5ce12db-kube-api-access-lwwbt\") pod \"mariadb-client-2\" (UID: \"27dff4ad-45c2-4d58-b0de-8736a5ce12db\") " pod="openstack/mariadb-client-2" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.887007 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwwbt\" (UniqueName: \"kubernetes.io/projected/27dff4ad-45c2-4d58-b0de-8736a5ce12db-kube-api-access-lwwbt\") pod \"mariadb-client-2\" (UID: \"27dff4ad-45c2-4d58-b0de-8736a5ce12db\") " pod="openstack/mariadb-client-2" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.903186 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwwbt\" (UniqueName: \"kubernetes.io/projected/27dff4ad-45c2-4d58-b0de-8736a5ce12db-kube-api-access-lwwbt\") pod \"mariadb-client-2\" (UID: \"27dff4ad-45c2-4d58-b0de-8736a5ce12db\") " pod="openstack/mariadb-client-2" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.951852 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.988956 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e27f71d3278d90ce7eb769aea0ce8e27ff9bb4c38b2182bea8cdfb8d8a0046c" Oct 06 15:02:42 crc kubenswrapper[4757]: I1006 15:02:42.989033 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Oct 06 15:02:43 crc kubenswrapper[4757]: I1006 15:02:43.461222 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Oct 06 15:02:44 crc kubenswrapper[4757]: I1006 15:02:44.006839 4757 generic.go:334] "Generic (PLEG): container finished" podID="27dff4ad-45c2-4d58-b0de-8736a5ce12db" containerID="7dda30fd00e79ca296467f36a6434b1326426000fd6045216956436574dae36c" exitCode=0 Oct 06 15:02:44 crc kubenswrapper[4757]: I1006 15:02:44.007215 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"27dff4ad-45c2-4d58-b0de-8736a5ce12db","Type":"ContainerDied","Data":"7dda30fd00e79ca296467f36a6434b1326426000fd6045216956436574dae36c"} Oct 06 15:02:44 crc kubenswrapper[4757]: I1006 15:02:44.007526 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"27dff4ad-45c2-4d58-b0de-8736a5ce12db","Type":"ContainerStarted","Data":"521870d1739d723b927bb865f77e2bb2717575689014ada237a345b1f8c3b23c"} Oct 06 15:02:44 crc kubenswrapper[4757]: I1006 15:02:44.198219 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16f29920-78e8-4894-b06f-dc591bf63115" path="/var/lib/kubelet/pods/16f29920-78e8-4894-b06f-dc591bf63115/volumes" Oct 06 15:02:45 crc kubenswrapper[4757]: I1006 15:02:45.456556 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Oct 06 15:02:45 crc kubenswrapper[4757]: I1006 15:02:45.473334 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_27dff4ad-45c2-4d58-b0de-8736a5ce12db/mariadb-client-2/0.log" Oct 06 15:02:45 crc kubenswrapper[4757]: I1006 15:02:45.497735 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Oct 06 15:02:45 crc kubenswrapper[4757]: I1006 15:02:45.503611 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Oct 06 15:02:45 crc kubenswrapper[4757]: I1006 15:02:45.540306 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwwbt\" (UniqueName: \"kubernetes.io/projected/27dff4ad-45c2-4d58-b0de-8736a5ce12db-kube-api-access-lwwbt\") pod \"27dff4ad-45c2-4d58-b0de-8736a5ce12db\" (UID: \"27dff4ad-45c2-4d58-b0de-8736a5ce12db\") " Oct 06 15:02:45 crc kubenswrapper[4757]: I1006 15:02:45.546576 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27dff4ad-45c2-4d58-b0de-8736a5ce12db-kube-api-access-lwwbt" (OuterVolumeSpecName: "kube-api-access-lwwbt") pod "27dff4ad-45c2-4d58-b0de-8736a5ce12db" (UID: "27dff4ad-45c2-4d58-b0de-8736a5ce12db"). InnerVolumeSpecName "kube-api-access-lwwbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:02:45 crc kubenswrapper[4757]: I1006 15:02:45.642539 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwwbt\" (UniqueName: \"kubernetes.io/projected/27dff4ad-45c2-4d58-b0de-8736a5ce12db-kube-api-access-lwwbt\") on node \"crc\" DevicePath \"\"" Oct 06 15:02:46 crc kubenswrapper[4757]: I1006 15:02:46.047080 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="521870d1739d723b927bb865f77e2bb2717575689014ada237a345b1f8c3b23c" Oct 06 15:02:46 crc kubenswrapper[4757]: I1006 15:02:46.047217 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Oct 06 15:02:46 crc kubenswrapper[4757]: I1006 15:02:46.197120 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27dff4ad-45c2-4d58-b0de-8736a5ce12db" path="/var/lib/kubelet/pods/27dff4ad-45c2-4d58-b0de-8736a5ce12db/volumes" Oct 06 15:02:52 crc kubenswrapper[4757]: I1006 15:02:52.184373 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:02:52 crc kubenswrapper[4757]: E1006 15:02:52.185173 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:03:03 crc kubenswrapper[4757]: I1006 15:03:03.180000 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:03:03 crc kubenswrapper[4757]: E1006 15:03:03.180628 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.114265 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdp7"] Oct 06 15:03:04 crc kubenswrapper[4757]: E1006 15:03:04.114634 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27dff4ad-45c2-4d58-b0de-8736a5ce12db" containerName="mariadb-client-2" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.114655 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="27dff4ad-45c2-4d58-b0de-8736a5ce12db" containerName="mariadb-client-2" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.117021 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="27dff4ad-45c2-4d58-b0de-8736a5ce12db" containerName="mariadb-client-2" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.120194 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.151785 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdp7"] Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.237939 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwdqv\" (UniqueName: \"kubernetes.io/projected/c4ea59e5-562b-4718-9e56-a0c001051f73-kube-api-access-wwdqv\") pod \"redhat-marketplace-qvdp7\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.238742 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-catalog-content\") pod \"redhat-marketplace-qvdp7\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.238845 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-utilities\") pod \"redhat-marketplace-qvdp7\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.340427 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-catalog-content\") pod \"redhat-marketplace-qvdp7\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.340725 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-utilities\") pod \"redhat-marketplace-qvdp7\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.340893 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwdqv\" (UniqueName: \"kubernetes.io/projected/c4ea59e5-562b-4718-9e56-a0c001051f73-kube-api-access-wwdqv\") pod \"redhat-marketplace-qvdp7\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.340973 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-catalog-content\") pod \"redhat-marketplace-qvdp7\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.341221 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-utilities\") pod \"redhat-marketplace-qvdp7\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.360559 4757 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wwdqv\" (UniqueName: \"kubernetes.io/projected/c4ea59e5-562b-4718-9e56-a0c001051f73-kube-api-access-wwdqv\") pod \"redhat-marketplace-qvdp7\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.448792 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:04 crc kubenswrapper[4757]: I1006 15:03:04.652553 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdp7"] Oct 06 15:03:05 crc kubenswrapper[4757]: I1006 15:03:05.201860 4757 generic.go:334] "Generic (PLEG): container finished" podID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerID="1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916" exitCode=0 Oct 06 15:03:05 crc kubenswrapper[4757]: I1006 15:03:05.201918 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdp7" event={"ID":"c4ea59e5-562b-4718-9e56-a0c001051f73","Type":"ContainerDied","Data":"1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916"} Oct 06 15:03:05 crc kubenswrapper[4757]: I1006 15:03:05.202206 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdp7" event={"ID":"c4ea59e5-562b-4718-9e56-a0c001051f73","Type":"ContainerStarted","Data":"2b761164aedd2c5b38effcc97a9da7baf17315d43697fa5449761c5d4086261d"} Oct 06 15:03:05 crc kubenswrapper[4757]: I1006 15:03:05.204844 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 15:03:06 crc kubenswrapper[4757]: I1006 15:03:06.211436 4757 generic.go:334] "Generic (PLEG): container finished" podID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerID="a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf" exitCode=0 Oct 06 15:03:06 crc kubenswrapper[4757]: I1006 15:03:06.211517 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdp7" event={"ID":"c4ea59e5-562b-4718-9e56-a0c001051f73","Type":"ContainerDied","Data":"a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf"} Oct 06 15:03:07 crc kubenswrapper[4757]: I1006 15:03:07.223956 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdp7" event={"ID":"c4ea59e5-562b-4718-9e56-a0c001051f73","Type":"ContainerStarted","Data":"caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d"} Oct 06 15:03:07 crc kubenswrapper[4757]: I1006 15:03:07.246054 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qvdp7" podStartSLOduration=1.645649461 podStartE2EDuration="3.246029214s" podCreationTimestamp="2025-10-06 15:03:04 +0000 UTC" firstStartedPulling="2025-10-06 15:03:05.203944113 +0000 UTC m=+5073.701262680" lastFinishedPulling="2025-10-06 15:03:06.804323896 +0000 UTC m=+5075.301642433" observedRunningTime="2025-10-06 15:03:07.237706785 +0000 UTC m=+5075.735025362" watchObservedRunningTime="2025-10-06 15:03:07.246029214 +0000 UTC m=+5075.743347761" Oct 06 15:03:14 crc kubenswrapper[4757]: I1006 15:03:14.449799 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:14 crc kubenswrapper[4757]: I1006 15:03:14.450266 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:14 crc kubenswrapper[4757]: I1006 15:03:14.509533 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:15 crc kubenswrapper[4757]: I1006 15:03:15.180334 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:03:15 crc kubenswrapper[4757]: E1006 15:03:15.180846 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:03:15 crc kubenswrapper[4757]: I1006 15:03:15.361756 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:15 crc kubenswrapper[4757]: I1006 15:03:15.504259 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdp7"] Oct 06 15:03:17 crc kubenswrapper[4757]: I1006 15:03:17.315078 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qvdp7" podUID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerName="registry-server" containerID="cri-o://caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d" gracePeriod=2 Oct 06 15:03:17 crc kubenswrapper[4757]: I1006 15:03:17.801651 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:17 crc kubenswrapper[4757]: I1006 15:03:17.961522 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwdqv\" (UniqueName: \"kubernetes.io/projected/c4ea59e5-562b-4718-9e56-a0c001051f73-kube-api-access-wwdqv\") pod \"c4ea59e5-562b-4718-9e56-a0c001051f73\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " Oct 06 15:03:17 crc kubenswrapper[4757]: I1006 15:03:17.961626 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-utilities\") pod \"c4ea59e5-562b-4718-9e56-a0c001051f73\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " Oct 06 15:03:17 crc kubenswrapper[4757]: I1006 15:03:17.961681 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-catalog-content\") pod \"c4ea59e5-562b-4718-9e56-a0c001051f73\" (UID: \"c4ea59e5-562b-4718-9e56-a0c001051f73\") " Oct 06 15:03:17 crc kubenswrapper[4757]: I1006 15:03:17.963193 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-utilities" (OuterVolumeSpecName: "utilities") pod "c4ea59e5-562b-4718-9e56-a0c001051f73" (UID: "c4ea59e5-562b-4718-9e56-a0c001051f73"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:03:17 crc kubenswrapper[4757]: I1006 15:03:17.967192 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4ea59e5-562b-4718-9e56-a0c001051f73-kube-api-access-wwdqv" (OuterVolumeSpecName: "kube-api-access-wwdqv") pod "c4ea59e5-562b-4718-9e56-a0c001051f73" (UID: "c4ea59e5-562b-4718-9e56-a0c001051f73"). InnerVolumeSpecName "kube-api-access-wwdqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:03:17 crc kubenswrapper[4757]: I1006 15:03:17.979220 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c4ea59e5-562b-4718-9e56-a0c001051f73" (UID: "c4ea59e5-562b-4718-9e56-a0c001051f73"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.063941 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwdqv\" (UniqueName: \"kubernetes.io/projected/c4ea59e5-562b-4718-9e56-a0c001051f73-kube-api-access-wwdqv\") on node \"crc\" DevicePath \"\"" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.063971 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.063983 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4ea59e5-562b-4718-9e56-a0c001051f73-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.328324 4757 generic.go:334] "Generic (PLEG): container finished" podID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerID="caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d" exitCode=0 Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.328445 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qvdp7" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.328442 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdp7" event={"ID":"c4ea59e5-562b-4718-9e56-a0c001051f73","Type":"ContainerDied","Data":"caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d"} Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.328642 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdp7" event={"ID":"c4ea59e5-562b-4718-9e56-a0c001051f73","Type":"ContainerDied","Data":"2b761164aedd2c5b38effcc97a9da7baf17315d43697fa5449761c5d4086261d"} Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.328728 4757 scope.go:117] "RemoveContainer" containerID="caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.355641 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdp7"] Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.360869 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdp7"] Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.362724 4757 scope.go:117] "RemoveContainer" containerID="a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.382146 4757 scope.go:117] "RemoveContainer" containerID="1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.421242 4757 scope.go:117] "RemoveContainer" containerID="caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d" Oct 06 15:03:18 crc kubenswrapper[4757]: E1006 15:03:18.421696 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d\": container with ID starting with caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d not found: ID does not exist" containerID="caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.421723 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d"} err="failed to get container status \"caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d\": rpc error: code = NotFound desc = could not find container \"caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d\": container with ID starting with caa6e85026ebd4bb8874f99937f76456cf8dc26a2ff5e2687ab24ba93dd8f90d not found: ID does not exist" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.421744 4757 scope.go:117] "RemoveContainer" containerID="a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf" Oct 06 15:03:18 crc kubenswrapper[4757]: E1006 15:03:18.422160 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf\": container with ID starting with a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf not found: ID does not exist" containerID="a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.422223 4757 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf"} err="failed to get container status \"a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf\": rpc error: code = NotFound desc = could not find container \"a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf\": container with ID starting with a9a8231c657b37512866f3cbb76f050511f2ac4e00a2d063ac678a11e02e50cf not found: ID does not exist" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.422265 4757 scope.go:117] "RemoveContainer" containerID="1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916" Oct 06 15:03:18 crc kubenswrapper[4757]: E1006 15:03:18.422600 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916\": container with ID starting with 1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916 not found: ID does not exist" containerID="1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916" Oct 06 15:03:18 crc kubenswrapper[4757]: I1006 15:03:18.422636 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916"} err="failed to get container status \"1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916\": rpc error: code = NotFound desc = could not find container \"1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916\": container with ID starting with 1f161d27624244402180b81c4847324bef51d2c12407e8f98cac4b4437f6c916 not found: ID does not exist" Oct 06 15:03:20 crc kubenswrapper[4757]: I1006 15:03:20.192832 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4ea59e5-562b-4718-9e56-a0c001051f73" path="/var/lib/kubelet/pods/c4ea59e5-562b-4718-9e56-a0c001051f73/volumes" Oct 06 15:03:27 crc kubenswrapper[4757]: I1006 15:03:27.179718 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:03:27 crc kubenswrapper[4757]: E1006 15:03:27.180422 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:03:42 crc kubenswrapper[4757]: I1006 15:03:42.184029 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:03:42 crc kubenswrapper[4757]: E1006 15:03:42.184805 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.691781 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zxglw"] Oct 06 15:03:47 crc 
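The "DeleteContainer returned error ... NotFound" records above are a benign race: by the time the kubelet retries removal, CRI-O has already deleted the container. A minimal sketch of that idempotent-cleanup pattern, assuming a simplified stand-in for the CRI runtime client (not the real cri-api interface):

    package cleanup

    import (
    	"context"
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // runtimeClient is a hypothetical subset of a CRI runtime client.
    type runtimeClient interface {
    	RemoveContainer(ctx context.Context, id string) error
    }

    // removeIfPresent treats gRPC NotFound as success, mirroring how the
    // records above end with the container simply being gone rather than
    // the sync failing.
    func removeIfPresent(ctx context.Context, rt runtimeClient, id string) error {
    	if err := rt.RemoveContainer(ctx, id); err != nil {
    		if status.Code(err) == codes.NotFound {
    			fmt.Printf("container %s already gone; nothing to do\n", id)
    			return nil
    		}
    		return fmt.Errorf("remove container %s: %w", id, err)
    	}
    	return nil
    }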
Oct 06 15:03:47 crc kubenswrapper[4757]: E1006 15:03:47.693662 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerName="extract-content"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.693752 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerName="extract-content"
Oct 06 15:03:47 crc kubenswrapper[4757]: E1006 15:03:47.693841 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerName="registry-server"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.693915 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerName="registry-server"
Oct 06 15:03:47 crc kubenswrapper[4757]: E1006 15:03:47.693987 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerName="extract-utilities"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.694040 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerName="extract-utilities"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.694306 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4ea59e5-562b-4718-9e56-a0c001051f73" containerName="registry-server"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.695580 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.708871 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zxglw"]
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.883798 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-utilities\") pod \"redhat-operators-zxglw\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") " pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.883875 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-catalog-content\") pod \"redhat-operators-zxglw\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") " pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.884532 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjsmq\" (UniqueName: \"kubernetes.io/projected/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-kube-api-access-qjsmq\") pod \"redhat-operators-zxglw\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") " pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.986044 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-utilities\") pod \"redhat-operators-zxglw\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") " pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.986120 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-catalog-content\") pod \"redhat-operators-zxglw\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") " pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.986167 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjsmq\" (UniqueName: \"kubernetes.io/projected/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-kube-api-access-qjsmq\") pod \"redhat-operators-zxglw\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") " pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.986651 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-utilities\") pod \"redhat-operators-zxglw\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") " pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:47 crc kubenswrapper[4757]: I1006 15:03:47.986814 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-catalog-content\") pod \"redhat-operators-zxglw\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") " pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:48 crc kubenswrapper[4757]: I1006 15:03:48.008322 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjsmq\" (UniqueName: \"kubernetes.io/projected/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-kube-api-access-qjsmq\") pod \"redhat-operators-zxglw\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") " pod="openshift-marketplace/redhat-operators-zxglw"
Need to start a new one" pod="openshift-marketplace/redhat-operators-zxglw" Oct 06 15:03:48 crc kubenswrapper[4757]: I1006 15:03:48.421202 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zxglw"] Oct 06 15:03:48 crc kubenswrapper[4757]: I1006 15:03:48.612170 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxglw" event={"ID":"e5b4b073-72a3-46a6-b91d-4a386bd30ad7","Type":"ContainerStarted","Data":"8f4cf89ea73b2a5792f307cb2d49fa987d2d0a839f8451fe34f0fb42dba15c6b"} Oct 06 15:03:49 crc kubenswrapper[4757]: I1006 15:03:49.625354 4757 generic.go:334] "Generic (PLEG): container finished" podID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerID="18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a" exitCode=0 Oct 06 15:03:49 crc kubenswrapper[4757]: I1006 15:03:49.625439 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxglw" event={"ID":"e5b4b073-72a3-46a6-b91d-4a386bd30ad7","Type":"ContainerDied","Data":"18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a"} Oct 06 15:03:51 crc kubenswrapper[4757]: I1006 15:03:51.655152 4757 generic.go:334] "Generic (PLEG): container finished" podID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerID="eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6" exitCode=0 Oct 06 15:03:51 crc kubenswrapper[4757]: I1006 15:03:51.655231 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxglw" event={"ID":"e5b4b073-72a3-46a6-b91d-4a386bd30ad7","Type":"ContainerDied","Data":"eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6"} Oct 06 15:03:52 crc kubenswrapper[4757]: I1006 15:03:52.666964 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxglw" event={"ID":"e5b4b073-72a3-46a6-b91d-4a386bd30ad7","Type":"ContainerStarted","Data":"0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30"} Oct 06 15:03:52 crc kubenswrapper[4757]: I1006 15:03:52.684553 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zxglw" podStartSLOduration=3.268137456 podStartE2EDuration="5.684531064s" podCreationTimestamp="2025-10-06 15:03:47 +0000 UTC" firstStartedPulling="2025-10-06 15:03:49.627717502 +0000 UTC m=+5118.125036049" lastFinishedPulling="2025-10-06 15:03:52.04411112 +0000 UTC m=+5120.541429657" observedRunningTime="2025-10-06 15:03:52.682524859 +0000 UTC m=+5121.179843406" watchObservedRunningTime="2025-10-06 15:03:52.684531064 +0000 UTC m=+5121.181849621" Oct 06 15:03:55 crc kubenswrapper[4757]: I1006 15:03:55.180226 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:03:55 crc kubenswrapper[4757]: E1006 15:03:55.181114 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:03:58 crc kubenswrapper[4757]: I1006 15:03:58.015124 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zxglw" Oct 06 15:03:58 crc 
Oct 06 15:03:58 crc kubenswrapper[4757]: I1006 15:03:58.015452 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:58 crc kubenswrapper[4757]: I1006 15:03:58.054152 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:58 crc kubenswrapper[4757]: I1006 15:03:58.754560 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:03:58 crc kubenswrapper[4757]: I1006 15:03:58.817698 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zxglw"]
Oct 06 15:04:00 crc kubenswrapper[4757]: I1006 15:04:00.723138 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zxglw" podUID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerName="registry-server" containerID="cri-o://0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30" gracePeriod=2
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.120734 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.308966 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjsmq\" (UniqueName: \"kubernetes.io/projected/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-kube-api-access-qjsmq\") pod \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") "
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.309062 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-utilities\") pod \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") "
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.309283 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-catalog-content\") pod \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\" (UID: \"e5b4b073-72a3-46a6-b91d-4a386bd30ad7\") "
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.310566 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-utilities" (OuterVolumeSpecName: "utilities") pod "e5b4b073-72a3-46a6-b91d-4a386bd30ad7" (UID: "e5b4b073-72a3-46a6-b91d-4a386bd30ad7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.310924 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-utilities\") on node \"crc\" DevicePath \"\""
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.319043 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-kube-api-access-qjsmq" (OuterVolumeSpecName: "kube-api-access-qjsmq") pod "e5b4b073-72a3-46a6-b91d-4a386bd30ad7" (UID: "e5b4b073-72a3-46a6-b91d-4a386bd30ad7"). InnerVolumeSpecName "kube-api-access-qjsmq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.411637 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjsmq\" (UniqueName: \"kubernetes.io/projected/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-kube-api-access-qjsmq\") on node \"crc\" DevicePath \"\""
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.737112 4757 generic.go:334] "Generic (PLEG): container finished" podID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerID="0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30" exitCode=0
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.737156 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxglw" event={"ID":"e5b4b073-72a3-46a6-b91d-4a386bd30ad7","Type":"ContainerDied","Data":"0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30"}
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.737187 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zxglw" event={"ID":"e5b4b073-72a3-46a6-b91d-4a386bd30ad7","Type":"ContainerDied","Data":"8f4cf89ea73b2a5792f307cb2d49fa987d2d0a839f8451fe34f0fb42dba15c6b"}
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.737206 4757 scope.go:117] "RemoveContainer" containerID="0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.737228 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zxglw"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.763396 4757 scope.go:117] "RemoveContainer" containerID="eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.788495 4757 scope.go:117] "RemoveContainer" containerID="18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.831730 4757 scope.go:117] "RemoveContainer" containerID="0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30"
Oct 06 15:04:01 crc kubenswrapper[4757]: E1006 15:04:01.832477 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30\": container with ID starting with 0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30 not found: ID does not exist" containerID="0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.832529 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30"} err="failed to get container status \"0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30\": rpc error: code = NotFound desc = could not find container \"0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30\": container with ID starting with 0f670bdd87aba5108284740097ad1f95fc204855234b0e1f95990bb162164d30 not found: ID does not exist"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.832566 4757 scope.go:117] "RemoveContainer" containerID="eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6"
Oct 06 15:04:01 crc kubenswrapper[4757]: E1006 15:04:01.833004 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6\": container with ID starting with eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6 not found: ID does not exist" containerID="eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.833044 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6"} err="failed to get container status \"eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6\": rpc error: code = NotFound desc = could not find container \"eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6\": container with ID starting with eb5864d9a1b3c85a14062554ecef9508c4515b8e9249832ecfa2b10cc292a1f6 not found: ID does not exist"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.833070 4757 scope.go:117] "RemoveContainer" containerID="18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a"
Oct 06 15:04:01 crc kubenswrapper[4757]: E1006 15:04:01.833623 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a\": container with ID starting with 18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a not found: ID does not exist" containerID="18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a"
Oct 06 15:04:01 crc kubenswrapper[4757]: I1006 15:04:01.833765 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a"} err="failed to get container status \"18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a\": rpc error: code = NotFound desc = could not find container \"18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a\": container with ID starting with 18ab66449530784c34f3c3627f2afdf2152d7ee8bb5eb70ffbe81aea2edf914a not found: ID does not exist"
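The "Killing container with a grace period ... gracePeriod=2" record above is the TERM-then-KILL pattern: signal the container, wait up to the grace period, force-kill if it has not exited. The real kubelet delegates this to the CRI runtime (CRI-O here); a minimal process-level sketch of the same idea, assuming a Unix platform:

    package killsketch

    import (
    	"os"
    	"syscall"
    	"time"
    )

    // stopWithGrace sends SIGTERM, waits up to grace for exit, then
    // escalates to SIGKILL. Illustrative only; not kubelet code.
    func stopWithGrace(p *os.Process, grace time.Duration) error {
    	if err := p.Signal(syscall.SIGTERM); err != nil {
    		return err
    	}
    	done := make(chan error, 1)
    	go func() {
    		_, err := p.Wait()
    		done <- err
    	}()
    	select {
    	case err := <-done:
    		return err // exited within the grace period
    	case <-time.After(grace):
    		return p.Kill() // grace period elapsed; force kill
    	}
    }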
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:04:02 crc kubenswrapper[4757]: I1006 15:04:02.121734 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5b4b073-72a3-46a6-b91d-4a386bd30ad7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 15:04:02 crc kubenswrapper[4757]: I1006 15:04:02.367209 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zxglw"] Oct 06 15:04:02 crc kubenswrapper[4757]: I1006 15:04:02.377281 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zxglw"] Oct 06 15:04:04 crc kubenswrapper[4757]: I1006 15:04:04.197038 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" path="/var/lib/kubelet/pods/e5b4b073-72a3-46a6-b91d-4a386bd30ad7/volumes" Oct 06 15:04:06 crc kubenswrapper[4757]: I1006 15:04:06.181262 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:04:06 crc kubenswrapper[4757]: E1006 15:04:06.181865 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:04:18 crc kubenswrapper[4757]: I1006 15:04:18.180078 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:04:18 crc kubenswrapper[4757]: E1006 15:04:18.180858 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:04:31 crc kubenswrapper[4757]: I1006 15:04:31.180191 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:04:31 crc kubenswrapper[4757]: E1006 15:04:31.181357 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:04:37 crc kubenswrapper[4757]: I1006 15:04:37.542532 4757 scope.go:117] "RemoveContainer" containerID="a53e77c23399ece65bfb873f4b595d36079c6f459b84c526a54bafe3c6a43ee2" Oct 06 15:04:45 crc kubenswrapper[4757]: I1006 15:04:45.180557 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:04:45 crc kubenswrapper[4757]: E1006 15:04:45.181590 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:04:59 crc kubenswrapper[4757]: I1006 15:04:59.180682 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:04:59 crc kubenswrapper[4757]: E1006 15:04:59.181716 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:05:10 crc kubenswrapper[4757]: I1006 15:05:10.180520 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:05:10 crc kubenswrapper[4757]: E1006 15:05:10.182976 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:05:24 crc kubenswrapper[4757]: I1006 15:05:24.179869 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:05:24 crc kubenswrapper[4757]: E1006 15:05:24.180602 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:05:39 crc kubenswrapper[4757]: I1006 15:05:39.181821 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:05:39 crc kubenswrapper[4757]: E1006 15:05:39.183140 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.007946 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Oct 06 15:05:51 crc kubenswrapper[4757]: E1006 15:05:51.009210 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerName="extract-utilities" Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.009237 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerName="extract-utilities" Oct 06 15:05:51 crc kubenswrapper[4757]: E1006 15:05:51.009278 
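The machine-config-daemon records above repeat "back-off 5m0s" on every sync: the restart delay has saturated at its cap, so each sync attempt is skipped until the back-off window expires. A sketch of the commonly described kubelet restart back-off (an initial delay that doubles per restart, capped at 5m; the exact constants live in the kubelet and are an assumption here):

    package backoffsketch

    import "time"

    // crashLoopDelay returns the restart delay after a given number of
    // consecutive failed restarts, under the assumed 10s-doubling-to-5m
    // policy.
    func crashLoopDelay(restarts int) time.Duration {
    	const (
    		initial = 10 * time.Second
    		cap     = 5 * time.Minute
    	)
    	d := initial
    	for i := 0; i < restarts; i++ {
    		d *= 2
    		if d >= cap {
    			return cap // matches the steady "back-off 5m0s" seen above
    		}
    	}
    	return d
    }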
Oct 06 15:05:51 crc kubenswrapper[4757]: E1006 15:05:51.009278 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerName="registry-server"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.009292 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerName="registry-server"
Oct 06 15:05:51 crc kubenswrapper[4757]: E1006 15:05:51.009323 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerName="extract-content"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.009336 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerName="extract-content"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.009670 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5b4b073-72a3-46a6-b91d-4a386bd30ad7" containerName="registry-server"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.010564 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.013130 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-s55mr"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.029409 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.185621 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7614cf03-d726-46be-b0fd-2de39f59a660\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7614cf03-d726-46be-b0fd-2de39f59a660\") pod \"mariadb-copy-data\" (UID: \"e5f3a4b5-f137-498a-a993-556989a82d82\") " pod="openstack/mariadb-copy-data"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.185819 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbdzs\" (UniqueName: \"kubernetes.io/projected/e5f3a4b5-f137-498a-a993-556989a82d82-kube-api-access-mbdzs\") pod \"mariadb-copy-data\" (UID: \"e5f3a4b5-f137-498a-a993-556989a82d82\") " pod="openstack/mariadb-copy-data"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.287307 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7614cf03-d726-46be-b0fd-2de39f59a660\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7614cf03-d726-46be-b0fd-2de39f59a660\") pod \"mariadb-copy-data\" (UID: \"e5f3a4b5-f137-498a-a993-556989a82d82\") " pod="openstack/mariadb-copy-data"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.287439 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbdzs\" (UniqueName: \"kubernetes.io/projected/e5f3a4b5-f137-498a-a993-556989a82d82-kube-api-access-mbdzs\") pod \"mariadb-copy-data\" (UID: \"e5f3a4b5-f137-498a-a993-556989a82d82\") " pod="openstack/mariadb-copy-data"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.292847 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
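The csi_attacher record above shows the kubelet skipping the staging step because the node plugin (the hostpath provisioner here) does not advertise STAGE_UNSTAGE_VOLUME; mounting proceeds straight to SetUp. A sketch of the capability check using the CSI spec's generated Go client, with connection details assumed:

    package csicapability

    import (
    	"context"

    	csi "github.com/container-storage-interface/spec/lib/go/csi"
    	"google.golang.org/grpc"
    )

    // needsNodeStage asks the node plugin whether NodeStageVolume must be
    // called before NodePublishVolume. When it returns false, the kubelet's
    // MountDevice step is a no-op, as logged above.
    func needsNodeStage(ctx context.Context, conn *grpc.ClientConn) (bool, error) {
    	client := csi.NewNodeClient(conn)
    	resp, err := client.NodeGetCapabilities(ctx, &csi.NodeGetCapabilitiesRequest{})
    	if err != nil {
    		return false, err
    	}
    	for _, c := range resp.GetCapabilities() {
    		if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
    			return true, nil
    		}
    	}
    	return false, nil
    }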
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.292933 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7614cf03-d726-46be-b0fd-2de39f59a660\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7614cf03-d726-46be-b0fd-2de39f59a660\") pod \"mariadb-copy-data\" (UID: \"e5f3a4b5-f137-498a-a993-556989a82d82\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/10eeaf4eb9fb7ce8c2c5c38bc43cdcee8940c2f18fb9089ba418441dd85d09bc/globalmount\"" pod="openstack/mariadb-copy-data"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.323408 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbdzs\" (UniqueName: \"kubernetes.io/projected/e5f3a4b5-f137-498a-a993-556989a82d82-kube-api-access-mbdzs\") pod \"mariadb-copy-data\" (UID: \"e5f3a4b5-f137-498a-a993-556989a82d82\") " pod="openstack/mariadb-copy-data"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.333580 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7614cf03-d726-46be-b0fd-2de39f59a660\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7614cf03-d726-46be-b0fd-2de39f59a660\") pod \"mariadb-copy-data\" (UID: \"e5f3a4b5-f137-498a-a993-556989a82d82\") " pod="openstack/mariadb-copy-data"
Oct 06 15:05:51 crc kubenswrapper[4757]: I1006 15:05:51.634122 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Oct 06 15:05:52 crc kubenswrapper[4757]: I1006 15:05:52.186138 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125"
Oct 06 15:05:52 crc kubenswrapper[4757]: E1006 15:05:52.186782 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:05:52 crc kubenswrapper[4757]: I1006 15:05:52.228653 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Oct 06 15:05:52 crc kubenswrapper[4757]: I1006 15:05:52.751031 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"e5f3a4b5-f137-498a-a993-556989a82d82","Type":"ContainerStarted","Data":"165cc960330ecab7ea502fc27f659895a5a04569882a2a8fa65c95508df5f28f"}
Oct 06 15:05:52 crc kubenswrapper[4757]: I1006 15:05:52.751128 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"e5f3a4b5-f137-498a-a993-556989a82d82","Type":"ContainerStarted","Data":"4334d1c7263fa6bf9864ec330522b96e7b3296b837db6842f07f11cf03ab094e"}
Oct 06 15:05:52 crc kubenswrapper[4757]: I1006 15:05:52.774846 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.7748166039999997 podStartE2EDuration="3.774816604s" podCreationTimestamp="2025-10-06 15:05:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:05:52.769970948 +0000 UTC m=+5241.267289525" watchObservedRunningTime="2025-10-06 15:05:52.774816604 +0000 UTC m=+5241.272135171"
Oct 06 15:05:54 crc kubenswrapper[4757]: I1006 15:05:54.482967 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:05:54 crc kubenswrapper[4757]: I1006 15:05:54.486049 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 06 15:05:54 crc kubenswrapper[4757]: I1006 15:05:54.496121 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:05:54 crc kubenswrapper[4757]: I1006 15:05:54.545480 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn7rt\" (UniqueName: \"kubernetes.io/projected/38f1fec4-eb8c-4da9-a7e0-5d9d304c803b-kube-api-access-nn7rt\") pod \"mariadb-client\" (UID: \"38f1fec4-eb8c-4da9-a7e0-5d9d304c803b\") " pod="openstack/mariadb-client"
Oct 06 15:05:54 crc kubenswrapper[4757]: I1006 15:05:54.647506 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn7rt\" (UniqueName: \"kubernetes.io/projected/38f1fec4-eb8c-4da9-a7e0-5d9d304c803b-kube-api-access-nn7rt\") pod \"mariadb-client\" (UID: \"38f1fec4-eb8c-4da9-a7e0-5d9d304c803b\") " pod="openstack/mariadb-client"
Oct 06 15:05:54 crc kubenswrapper[4757]: I1006 15:05:54.668743 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn7rt\" (UniqueName: \"kubernetes.io/projected/38f1fec4-eb8c-4da9-a7e0-5d9d304c803b-kube-api-access-nn7rt\") pod \"mariadb-client\" (UID: \"38f1fec4-eb8c-4da9-a7e0-5d9d304c803b\") " pod="openstack/mariadb-client"
Oct 06 15:05:54 crc kubenswrapper[4757]: I1006 15:05:54.818713 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 06 15:05:55 crc kubenswrapper[4757]: I1006 15:05:55.247920 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:05:55 crc kubenswrapper[4757]: I1006 15:05:55.778136 4757 generic.go:334] "Generic (PLEG): container finished" podID="38f1fec4-eb8c-4da9-a7e0-5d9d304c803b" containerID="5138545c07de1ffa5eefbd9d4ac4a7e039ade0fc93eda1f5ae1445771726047a" exitCode=0
Oct 06 15:05:55 crc kubenswrapper[4757]: I1006 15:05:55.778205 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"38f1fec4-eb8c-4da9-a7e0-5d9d304c803b","Type":"ContainerDied","Data":"5138545c07de1ffa5eefbd9d4ac4a7e039ade0fc93eda1f5ae1445771726047a"}
Oct 06 15:05:55 crc kubenswrapper[4757]: I1006 15:05:55.778245 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"38f1fec4-eb8c-4da9-a7e0-5d9d304c803b","Type":"ContainerStarted","Data":"396a1d9124ecc20f4b706ffcf2c717701df1607700b18139bb9a5f875963d59c"}
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.083800 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.105508 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_38f1fec4-eb8c-4da9-a7e0-5d9d304c803b/mariadb-client/0.log"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.130122 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.136021 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.187434 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nn7rt\" (UniqueName: \"kubernetes.io/projected/38f1fec4-eb8c-4da9-a7e0-5d9d304c803b-kube-api-access-nn7rt\") pod \"38f1fec4-eb8c-4da9-a7e0-5d9d304c803b\" (UID: \"38f1fec4-eb8c-4da9-a7e0-5d9d304c803b\") "
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.195176 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38f1fec4-eb8c-4da9-a7e0-5d9d304c803b-kube-api-access-nn7rt" (OuterVolumeSpecName: "kube-api-access-nn7rt") pod "38f1fec4-eb8c-4da9-a7e0-5d9d304c803b" (UID: "38f1fec4-eb8c-4da9-a7e0-5d9d304c803b"). InnerVolumeSpecName "kube-api-access-nn7rt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.276782 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:05:57 crc kubenswrapper[4757]: E1006 15:05:57.277079 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38f1fec4-eb8c-4da9-a7e0-5d9d304c803b" containerName="mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.277108 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="38f1fec4-eb8c-4da9-a7e0-5d9d304c803b" containerName="mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.277295 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="38f1fec4-eb8c-4da9-a7e0-5d9d304c803b" containerName="mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.277771 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.289255 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nn7rt\" (UniqueName: \"kubernetes.io/projected/38f1fec4-eb8c-4da9-a7e0-5d9d304c803b-kube-api-access-nn7rt\") on node \"crc\" DevicePath \"\""
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.294226 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.391104 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvskx\" (UniqueName: \"kubernetes.io/projected/fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca-kube-api-access-lvskx\") pod \"mariadb-client\" (UID: \"fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca\") " pod="openstack/mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.493795 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvskx\" (UniqueName: \"kubernetes.io/projected/fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca-kube-api-access-lvskx\") pod \"mariadb-client\" (UID: \"fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca\") " pod="openstack/mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.515253 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvskx\" (UniqueName: \"kubernetes.io/projected/fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca-kube-api-access-lvskx\") pod \"mariadb-client\" (UID: \"fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca\") " pod="openstack/mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.602374 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.792908 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="396a1d9124ecc20f4b706ffcf2c717701df1607700b18139bb9a5f875963d59c"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.792959 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 06 15:05:57 crc kubenswrapper[4757]: I1006 15:05:57.811553 4757 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="38f1fec4-eb8c-4da9-a7e0-5d9d304c803b" podUID="fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca"
Oct 06 15:05:58 crc kubenswrapper[4757]: I1006 15:05:58.014066 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:05:58 crc kubenswrapper[4757]: I1006 15:05:58.202606 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38f1fec4-eb8c-4da9-a7e0-5d9d304c803b" path="/var/lib/kubelet/pods/38f1fec4-eb8c-4da9-a7e0-5d9d304c803b/volumes"
Oct 06 15:05:58 crc kubenswrapper[4757]: I1006 15:05:58.803212 4757 generic.go:334] "Generic (PLEG): container finished" podID="fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca" containerID="612a393b889d335e43ef7810afc3ade62307a4ed124e573200bf1683b59291f6" exitCode=0
Oct 06 15:05:58 crc kubenswrapper[4757]: I1006 15:05:58.803400 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca","Type":"ContainerDied","Data":"612a393b889d335e43ef7810afc3ade62307a4ed124e573200bf1683b59291f6"}
Oct 06 15:05:58 crc kubenswrapper[4757]: I1006 15:05:58.803561 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca","Type":"ContainerStarted","Data":"571290fb14709f4f7a6ef2eb0a80925494a328c9744394911e7a0543504c755f"}
Oct 06 15:06:00 crc kubenswrapper[4757]: I1006 15:06:00.050816 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Oct 06 15:06:00 crc kubenswrapper[4757]: I1006 15:06:00.067224 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca/mariadb-client/0.log"
Oct 06 15:06:00 crc kubenswrapper[4757]: I1006 15:06:00.094632 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:06:00 crc kubenswrapper[4757]: I1006 15:06:00.103854 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Oct 06 15:06:00 crc kubenswrapper[4757]: I1006 15:06:00.132395 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvskx\" (UniqueName: \"kubernetes.io/projected/fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca-kube-api-access-lvskx\") pod \"fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca\" (UID: \"fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca\") "
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:06:00 crc kubenswrapper[4757]: I1006 15:06:00.189914 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca" path="/var/lib/kubelet/pods/fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca/volumes" Oct 06 15:06:00 crc kubenswrapper[4757]: I1006 15:06:00.235026 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvskx\" (UniqueName: \"kubernetes.io/projected/fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca-kube-api-access-lvskx\") on node \"crc\" DevicePath \"\"" Oct 06 15:06:00 crc kubenswrapper[4757]: I1006 15:06:00.817441 4757 scope.go:117] "RemoveContainer" containerID="612a393b889d335e43ef7810afc3ade62307a4ed124e573200bf1683b59291f6" Oct 06 15:06:00 crc kubenswrapper[4757]: I1006 15:06:00.817479 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.308328 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-x79pc"] Oct 06 15:06:04 crc kubenswrapper[4757]: E1006 15:06:04.309192 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca" containerName="mariadb-client" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.309207 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca" containerName="mariadb-client" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.309367 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd328c8e-e3d2-4f7d-ae6b-b2ffc06affca" containerName="mariadb-client" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.310824 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.313817 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x79pc"] Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.404701 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-utilities\") pod \"community-operators-x79pc\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.404752 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-catalog-content\") pod \"community-operators-x79pc\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.404809 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjn22\" (UniqueName: \"kubernetes.io/projected/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-kube-api-access-mjn22\") pod \"community-operators-x79pc\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.506154 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-utilities\") pod \"community-operators-x79pc\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.506208 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-catalog-content\") pod \"community-operators-x79pc\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.506270 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjn22\" (UniqueName: \"kubernetes.io/projected/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-kube-api-access-mjn22\") pod \"community-operators-x79pc\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.506661 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-utilities\") pod \"community-operators-x79pc\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.506694 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-catalog-content\") pod \"community-operators-x79pc\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.526225 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mjn22\" (UniqueName: \"kubernetes.io/projected/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-kube-api-access-mjn22\") pod \"community-operators-x79pc\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:04 crc kubenswrapper[4757]: I1006 15:06:04.629346 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:05 crc kubenswrapper[4757]: I1006 15:06:05.123736 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-x79pc"] Oct 06 15:06:05 crc kubenswrapper[4757]: I1006 15:06:05.180993 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:06:05 crc kubenswrapper[4757]: E1006 15:06:05.181653 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:06:05 crc kubenswrapper[4757]: I1006 15:06:05.863509 4757 generic.go:334] "Generic (PLEG): container finished" podID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerID="966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114" exitCode=0 Oct 06 15:06:05 crc kubenswrapper[4757]: I1006 15:06:05.863686 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x79pc" event={"ID":"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1","Type":"ContainerDied","Data":"966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114"} Oct 06 15:06:05 crc kubenswrapper[4757]: I1006 15:06:05.863843 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x79pc" event={"ID":"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1","Type":"ContainerStarted","Data":"bd6e672dfeab71ab2bdc743d67823162e22a776655458ca101e2fda6f0f988b6"} Oct 06 15:06:06 crc kubenswrapper[4757]: I1006 15:06:06.873682 4757 generic.go:334] "Generic (PLEG): container finished" podID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerID="99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783" exitCode=0 Oct 06 15:06:06 crc kubenswrapper[4757]: I1006 15:06:06.873735 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x79pc" event={"ID":"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1","Type":"ContainerDied","Data":"99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783"} Oct 06 15:06:07 crc kubenswrapper[4757]: I1006 15:06:07.882157 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x79pc" event={"ID":"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1","Type":"ContainerStarted","Data":"3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890"} Oct 06 15:06:07 crc kubenswrapper[4757]: I1006 15:06:07.904745 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-x79pc" podStartSLOduration=2.386169384 podStartE2EDuration="3.904725802s" podCreationTimestamp="2025-10-06 15:06:04 +0000 UTC" firstStartedPulling="2025-10-06 15:06:05.865820213 +0000 UTC m=+5254.363138770" 
lastFinishedPulling="2025-10-06 15:06:07.384376651 +0000 UTC m=+5255.881695188" observedRunningTime="2025-10-06 15:06:07.900268838 +0000 UTC m=+5256.397587385" watchObservedRunningTime="2025-10-06 15:06:07.904725802 +0000 UTC m=+5256.402044339" Oct 06 15:06:14 crc kubenswrapper[4757]: I1006 15:06:14.630235 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:14 crc kubenswrapper[4757]: I1006 15:06:14.630826 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:14 crc kubenswrapper[4757]: I1006 15:06:14.696337 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:14 crc kubenswrapper[4757]: I1006 15:06:14.984216 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:15 crc kubenswrapper[4757]: I1006 15:06:15.021827 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x79pc"] Oct 06 15:06:16 crc kubenswrapper[4757]: I1006 15:06:16.180854 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:06:16 crc kubenswrapper[4757]: E1006 15:06:16.181526 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:06:16 crc kubenswrapper[4757]: I1006 15:06:16.954511 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-x79pc" podUID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerName="registry-server" containerID="cri-o://3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890" gracePeriod=2 Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.329235 4757 util.go:48] "No ready sandbox for pod can be found. 
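The probe records above show the gating order: the startup probe reports "unhealthy" then "started", and only after that does the readiness probe's status move from "" (unknown) to "ready". A toy state machine capturing that ordering; the wiring is an illustrative assumption, not kubelet code:

    package probesketch

    // result mirrors the status strings in the "SyncLoop (probe)" records.
    type result string

    const (
    	unknown result = ""
    	ready   result = "ready"
    )

    type podProbes struct {
    	startupDone bool
    }

    // observe feeds one round of probe outcomes and returns the readiness
    // status the kubelet would report: unknown until startup has passed.
    func (p *podProbes) observe(startupOK, readinessOK bool) result {
    	if !p.startupDone {
    		if !startupOK {
    			return unknown // startup probe still "unhealthy"
    		}
    		p.startupDone = true // probe="startup" status="started"
    	}
    	if readinessOK {
    		return ready // probe="readiness" status="ready"
    	}
    	return unknown
    }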
Need to start a new one" pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.409274 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-catalog-content\") pod \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.409365 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-utilities\") pod \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.409549 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjn22\" (UniqueName: \"kubernetes.io/projected/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-kube-api-access-mjn22\") pod \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\" (UID: \"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1\") " Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.411005 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-utilities" (OuterVolumeSpecName: "utilities") pod "8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" (UID: "8b58f202-91cd-4fd9-8aeb-ef6f51b890e1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.415537 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-kube-api-access-mjn22" (OuterVolumeSpecName: "kube-api-access-mjn22") pod "8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" (UID: "8b58f202-91cd-4fd9-8aeb-ef6f51b890e1"). InnerVolumeSpecName "kube-api-access-mjn22". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.471006 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" (UID: "8b58f202-91cd-4fd9-8aeb-ef6f51b890e1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.511687 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.511722 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.511734 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjn22\" (UniqueName: \"kubernetes.io/projected/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1-kube-api-access-mjn22\") on node \"crc\" DevicePath \"\"" Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.963594 4757 generic.go:334] "Generic (PLEG): container finished" podID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerID="3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890" exitCode=0 Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.963642 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x79pc" event={"ID":"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1","Type":"ContainerDied","Data":"3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890"} Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.963678 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-x79pc" event={"ID":"8b58f202-91cd-4fd9-8aeb-ef6f51b890e1","Type":"ContainerDied","Data":"bd6e672dfeab71ab2bdc743d67823162e22a776655458ca101e2fda6f0f988b6"} Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.963695 4757 scope.go:117] "RemoveContainer" containerID="3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890" Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.963721 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-x79pc" Oct 06 15:06:17 crc kubenswrapper[4757]: I1006 15:06:17.990457 4757 scope.go:117] "RemoveContainer" containerID="99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783" Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.017459 4757 scope.go:117] "RemoveContainer" containerID="966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114" Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.017567 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-x79pc"] Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.026076 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-x79pc"] Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.058929 4757 scope.go:117] "RemoveContainer" containerID="3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890" Oct 06 15:06:18 crc kubenswrapper[4757]: E1006 15:06:18.059377 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890\": container with ID starting with 3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890 not found: ID does not exist" containerID="3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890" Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.059425 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890"} err="failed to get container status \"3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890\": rpc error: code = NotFound desc = could not find container \"3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890\": container with ID starting with 3ca0b04ce109d1564491f4a49e354513b15fbb9cbb3016431c7507b0f672d890 not found: ID does not exist" Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.059471 4757 scope.go:117] "RemoveContainer" containerID="99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783" Oct 06 15:06:18 crc kubenswrapper[4757]: E1006 15:06:18.059795 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783\": container with ID starting with 99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783 not found: ID does not exist" containerID="99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783" Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.059842 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783"} err="failed to get container status \"99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783\": rpc error: code = NotFound desc = could not find container \"99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783\": container with ID starting with 99f36b3b7e9b0ab4910caac6b8cfa59bf2f71443a84c78d33eef074588ca4783 not found: ID does not exist" Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.059873 4757 scope.go:117] "RemoveContainer" containerID="966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114" Oct 06 15:06:18 crc kubenswrapper[4757]: E1006 15:06:18.060208 4757 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114\": container with ID starting with 966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114 not found: ID does not exist" containerID="966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114" Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.060239 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114"} err="failed to get container status \"966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114\": rpc error: code = NotFound desc = could not find container \"966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114\": container with ID starting with 966e3e0ca06e18aed4ea2299fe25475089d2ef9bcbe38c87354c6cc9cf12c114 not found: ID does not exist" Oct 06 15:06:18 crc kubenswrapper[4757]: I1006 15:06:18.195348 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" path="/var/lib/kubelet/pods/8b58f202-91cd-4fd9-8aeb-ef6f51b890e1/volumes" Oct 06 15:06:29 crc kubenswrapper[4757]: I1006 15:06:29.180740 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:06:29 crc kubenswrapper[4757]: E1006 15:06:29.181507 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.269942 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 06 15:06:33 crc kubenswrapper[4757]: E1006 15:06:33.271144 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerName="extract-utilities" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.271177 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerName="extract-utilities" Oct 06 15:06:33 crc kubenswrapper[4757]: E1006 15:06:33.271199 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerName="registry-server" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.271212 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerName="registry-server" Oct 06 15:06:33 crc kubenswrapper[4757]: E1006 15:06:33.271265 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerName="extract-content" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.271279 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerName="extract-content" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.271575 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b58f202-91cd-4fd9-8aeb-ef6f51b890e1" containerName="registry-server" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.272962 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.277647 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.282144 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.283055 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.283389 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-khq2s" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.284059 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.284482 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.284792 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.287066 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.289997 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.304994 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.314630 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.321397 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.364778 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d03e2a-e649-41e0-bddb-9523672e5e9c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.364860 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95d03e2a-e649-41e0-bddb-9523672e5e9c-config\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.364904 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95d03e2a-e649-41e0-bddb-9523672e5e9c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.364960 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95d03e2a-e649-41e0-bddb-9523672e5e9c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 
15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.365019 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d03e2a-e649-41e0-bddb-9523672e5e9c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.365078 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9dj9\" (UniqueName: \"kubernetes.io/projected/95d03e2a-e649-41e0-bddb-9523672e5e9c-kube-api-access-v9dj9\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.365144 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/95d03e2a-e649-41e0-bddb-9523672e5e9c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.365196 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-692d5cfa-ed3d-4a9a-b0aa-5ce98c9ff54f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-692d5cfa-ed3d-4a9a-b0aa-5ce98c9ff54f\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.427527 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.429723 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.432427 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.432814 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.432918 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-9bjst" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.433186 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.437946 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.439735 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.444404 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.463535 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470050 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470114 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/66e11e3d-a78e-4706-a789-a997c6f73a64-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470656 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d03e2a-e649-41e0-bddb-9523672e5e9c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470707 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470744 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95d03e2a-e649-41e0-bddb-9523672e5e9c-config\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470775 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e11e3d-a78e-4706-a789-a997c6f73a64-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470802 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95d03e2a-e649-41e0-bddb-9523672e5e9c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470840 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-config\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470865 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/configmap/95d03e2a-e649-41e0-bddb-9523672e5e9c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470888 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e11e3d-a78e-4706-a789-a997c6f73a64-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470912 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d03e2a-e649-41e0-bddb-9523672e5e9c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470941 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-27cf04f5-55a1-4ab3-ac37-292e049aa744\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-27cf04f5-55a1-4ab3-ac37-292e049aa744\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.470972 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e11e3d-a78e-4706-a789-a997c6f73a64-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471000 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b5a988e4-1d9e-45a2-b4a2-1bb6840a51ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b5a988e4-1d9e-45a2-b4a2-1bb6840a51ad\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471021 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e11e3d-a78e-4706-a789-a997c6f73a64-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471046 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9dj9\" (UniqueName: \"kubernetes.io/projected/95d03e2a-e649-41e0-bddb-9523672e5e9c-kube-api-access-v9dj9\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471069 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/95d03e2a-e649-41e0-bddb-9523672e5e9c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471126 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-692d5cfa-ed3d-4a9a-b0aa-5ce98c9ff54f\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-692d5cfa-ed3d-4a9a-b0aa-5ce98c9ff54f\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471151 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471175 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j8j5\" (UniqueName: \"kubernetes.io/projected/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-kube-api-access-9j8j5\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471199 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471218 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66e11e3d-a78e-4706-a789-a997c6f73a64-config\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471246 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.471271 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84k7n\" (UniqueName: \"kubernetes.io/projected/66e11e3d-a78e-4706-a789-a997c6f73a64-kube-api-access-84k7n\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.475481 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/95d03e2a-e649-41e0-bddb-9523672e5e9c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.476806 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95d03e2a-e649-41e0-bddb-9523672e5e9c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.476943 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95d03e2a-e649-41e0-bddb-9523672e5e9c-config\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " 
pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.477274 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.478559 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.478594 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-692d5cfa-ed3d-4a9a-b0aa-5ce98c9ff54f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-692d5cfa-ed3d-4a9a-b0aa-5ce98c9ff54f\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/38014532bee56e4224cc9e7ea451fee680d8a33c8f3b25889cee9145b1e4db1c/globalmount\"" pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.479369 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.482582 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95d03e2a-e649-41e0-bddb-9523672e5e9c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.484745 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d03e2a-e649-41e0-bddb-9523672e5e9c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.490619 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.498964 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9dj9\" (UniqueName: \"kubernetes.io/projected/95d03e2a-e649-41e0-bddb-9523672e5e9c-kube-api-access-v9dj9\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.501416 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/95d03e2a-e649-41e0-bddb-9523672e5e9c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.534810 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-692d5cfa-ed3d-4a9a-b0aa-5ce98c9ff54f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-692d5cfa-ed3d-4a9a-b0aa-5ce98c9ff54f\") pod \"ovsdbserver-nb-0\" (UID: \"95d03e2a-e649-41e0-bddb-9523672e5e9c\") " pod="openstack/ovsdbserver-nb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.573658 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1" Oct 06 
15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.573798 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/be616da6-c336-4c71-b23c-3137a9a6c9ff-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.573835 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fbe1234-296b-41ce-a03e-fde5b9abc505-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.573897 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/be616da6-c336-4c71-b23c-3137a9a6c9ff-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.573955 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c1e5dffa-c497-42ae-856d-1bb750ee38aa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c1e5dffa-c497-42ae-856d-1bb750ee38aa\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.573986 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.574444 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be616da6-c336-4c71-b23c-3137a9a6c9ff-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.574522 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-config\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.574553 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/66e11e3d-a78e-4706-a789-a997c6f73a64-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.574614 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1fbe1234-296b-41ce-a03e-fde5b9abc505-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.574641 4757 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2b13b2c2-2e1e-47eb-8fae-857dfae8647e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2b13b2c2-2e1e-47eb-8fae-857dfae8647e\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.574712 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/be616da6-c336-4c71-b23c-3137a9a6c9ff-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.574758 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.574783 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be616da6-c336-4c71-b23c-3137a9a6c9ff-config\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.574964 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/66e11e3d-a78e-4706-a789-a997c6f73a64-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575030 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e11e3d-a78e-4706-a789-a997c6f73a64-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575146 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be616da6-c336-4c71-b23c-3137a9a6c9ff-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575317 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-config\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575372 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbe1234-296b-41ce-a03e-fde5b9abc505-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575403 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1fbe1234-296b-41ce-a03e-fde5b9abc505-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575471 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e11e3d-a78e-4706-a789-a997c6f73a64-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575500 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xd9d\" (UniqueName: \"kubernetes.io/projected/1fbe1234-296b-41ce-a03e-fde5b9abc505-kube-api-access-9xd9d\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575550 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575605 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-27cf04f5-55a1-4ab3-ac37-292e049aa744\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-27cf04f5-55a1-4ab3-ac37-292e049aa744\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575648 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fbe1234-296b-41ce-a03e-fde5b9abc505-config\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575697 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e11e3d-a78e-4706-a789-a997c6f73a64-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575735 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1d7ded27-be68-4dc7-983c-5d669df7d4fb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1d7ded27-be68-4dc7-983c-5d669df7d4fb\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575763 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575789 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b5a988e4-1d9e-45a2-b4a2-1bb6840a51ad\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b5a988e4-1d9e-45a2-b4a2-1bb6840a51ad\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575819 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e11e3d-a78e-4706-a789-a997c6f73a64-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575879 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1fbe1234-296b-41ce-a03e-fde5b9abc505-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575924 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nhf6\" (UniqueName: \"kubernetes.io/projected/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-kube-api-access-7nhf6\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575958 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j8j5\" (UniqueName: \"kubernetes.io/projected/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-kube-api-access-9j8j5\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575974 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.575999 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.576013 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66e11e3d-a78e-4706-a789-a997c6f73a64-config\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.576040 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.576074 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " 
pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.576109 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84k7n\" (UniqueName: \"kubernetes.io/projected/66e11e3d-a78e-4706-a789-a997c6f73a64-kube-api-access-84k7n\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.576127 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.576154 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxp8b\" (UniqueName: \"kubernetes.io/projected/be616da6-c336-4c71-b23c-3137a9a6c9ff-kube-api-access-gxp8b\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.576156 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-config\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.577034 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66e11e3d-a78e-4706-a789-a997c6f73a64-config\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.577073 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/66e11e3d-a78e-4706-a789-a997c6f73a64-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.577966 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.580715 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.580768 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.581631 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e11e3d-a78e-4706-a789-a997c6f73a64-ovsdbserver-nb-tls-certs\") pod 
\"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.582421 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.582461 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-27cf04f5-55a1-4ab3-ac37-292e049aa744\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-27cf04f5-55a1-4ab3-ac37-292e049aa744\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/59fc266f4151ff45c38db221859a4aca1929bbe34180f09454ebca18c5f41146/globalmount\"" pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.582784 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e11e3d-a78e-4706-a789-a997c6f73a64-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.582988 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.584051 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b5a988e4-1d9e-45a2-b4a2-1bb6840a51ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b5a988e4-1d9e-45a2-b4a2-1bb6840a51ad\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f0926a6819fdc6045b2b96629e0921fc0e685943cf1bff41ada8f22f9339fc32/globalmount\"" pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.583704 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.588117 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e11e3d-a78e-4706-a789-a997c6f73a64-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.593863 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9j8j5\" (UniqueName: \"kubernetes.io/projected/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-kube-api-access-9j8j5\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.594565 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84k7n\" (UniqueName: \"kubernetes.io/projected/66e11e3d-a78e-4706-a789-a997c6f73a64-kube-api-access-84k7n\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1" Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.596601 
4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/448d7cb5-8a9c-4430-acf9-e9d8662f9eaf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.613571 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-27cf04f5-55a1-4ab3-ac37-292e049aa744\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-27cf04f5-55a1-4ab3-ac37-292e049aa744\") pod \"ovsdbserver-nb-2\" (UID: \"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf\") " pod="openstack/ovsdbserver-nb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.614899 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b5a988e4-1d9e-45a2-b4a2-1bb6840a51ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b5a988e4-1d9e-45a2-b4a2-1bb6840a51ad\") pod \"ovsdbserver-nb-1\" (UID: \"66e11e3d-a78e-4706-a789-a997c6f73a64\") " pod="openstack/ovsdbserver-nb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.626040 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.645436 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.670399 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678557 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/be616da6-c336-4c71-b23c-3137a9a6c9ff-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678629 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c1e5dffa-c497-42ae-856d-1bb750ee38aa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c1e5dffa-c497-42ae-856d-1bb750ee38aa\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678662 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be616da6-c336-4c71-b23c-3137a9a6c9ff-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678700 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-config\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678726 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1fbe1234-296b-41ce-a03e-fde5b9abc505-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678751 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2b13b2c2-2e1e-47eb-8fae-857dfae8647e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2b13b2c2-2e1e-47eb-8fae-857dfae8647e\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678775 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/be616da6-c336-4c71-b23c-3137a9a6c9ff-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678801 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be616da6-c336-4c71-b23c-3137a9a6c9ff-config\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678835 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be616da6-c336-4c71-b23c-3137a9a6c9ff-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678872 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbe1234-296b-41ce-a03e-fde5b9abc505-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678901 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fbe1234-296b-41ce-a03e-fde5b9abc505-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678929 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xd9d\" (UniqueName: \"kubernetes.io/projected/1fbe1234-296b-41ce-a03e-fde5b9abc505-kube-api-access-9xd9d\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678949 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678974 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fbe1234-296b-41ce-a03e-fde5b9abc505-config\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.678999 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1d7ded27-be68-4dc7-983c-5d669df7d4fb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1d7ded27-be68-4dc7-983c-5d669df7d4fb\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.679016 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.679046 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1fbe1234-296b-41ce-a03e-fde5b9abc505-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.679069 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nhf6\" (UniqueName: \"kubernetes.io/projected/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-kube-api-access-7nhf6\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.679111 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.679135 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.679154 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxp8b\" (UniqueName: \"kubernetes.io/projected/be616da6-c336-4c71-b23c-3137a9a6c9ff-kube-api-access-gxp8b\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.679186 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.679211 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/be616da6-c336-4c71-b23c-3137a9a6c9ff-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.679229 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fbe1234-296b-41ce-a03e-fde5b9abc505-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.680865 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/be616da6-c336-4c71-b23c-3137a9a6c9ff-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.681078 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-config\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.682887 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/be616da6-c336-4c71-b23c-3137a9a6c9ff-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.683116 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be616da6-c336-4c71-b23c-3137a9a6c9ff-config\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.683386 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1fbe1234-296b-41ce-a03e-fde5b9abc505-config\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.683744 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.683912 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1fbe1234-296b-41ce-a03e-fde5b9abc505-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.685256 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be616da6-c336-4c71-b23c-3137a9a6c9ff-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.685948 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.685982 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.688075 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c1e5dffa-c497-42ae-856d-1bb750ee38aa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c1e5dffa-c497-42ae-856d-1bb750ee38aa\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/342f4293ec611684a0b3be3bd237a38f51d8ea773f6009a52e5e8d62dcfc5d99/globalmount\"" pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.686091 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/be616da6-c336-4c71-b23c-3137a9a6c9ff-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.686492 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1fbe1234-296b-41ce-a03e-fde5b9abc505-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.686052 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fbe1234-296b-41ce-a03e-fde5b9abc505-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.697469 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.697838 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.697900 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1d7ded27-be68-4dc7-983c-5d669df7d4fb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1d7ded27-be68-4dc7-983c-5d669df7d4fb\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/fe2f3f39126553ef4f51482e0a3e1bb0810c99ae855f7e33c83f66e5cbac58e4/globalmount\"" pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.698428 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fbe1234-296b-41ce-a03e-fde5b9abc505-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.698523 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.699731 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.701906 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.701934 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2b13b2c2-2e1e-47eb-8fae-857dfae8647e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2b13b2c2-2e1e-47eb-8fae-857dfae8647e\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/08cfd60228f1f911e2e372986c1acb69108f26d0bbb97059e11490e6940afcfc/globalmount\"" pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.702641 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nhf6\" (UniqueName: \"kubernetes.io/projected/65a44faf-fdf2-4ef5-9af8-6c31e0d9240b-kube-api-access-7nhf6\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.702850 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/be616da6-c336-4c71-b23c-3137a9a6c9ff-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.706470 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbe1234-296b-41ce-a03e-fde5b9abc505-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.712284 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxp8b\" (UniqueName: \"kubernetes.io/projected/be616da6-c336-4c71-b23c-3137a9a6c9ff-kube-api-access-gxp8b\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.714830 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xd9d\" (UniqueName: \"kubernetes.io/projected/1fbe1234-296b-41ce-a03e-fde5b9abc505-kube-api-access-9xd9d\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.746314 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1d7ded27-be68-4dc7-983c-5d669df7d4fb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1d7ded27-be68-4dc7-983c-5d669df7d4fb\") pod \"ovsdbserver-sb-1\" (UID: \"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b\") " pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.766407 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2b13b2c2-2e1e-47eb-8fae-857dfae8647e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2b13b2c2-2e1e-47eb-8fae-857dfae8647e\") pod \"ovsdbserver-sb-0\" (UID: \"be616da6-c336-4c71-b23c-3137a9a6c9ff\") " pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.783340 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c1e5dffa-c497-42ae-856d-1bb750ee38aa\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c1e5dffa-c497-42ae-856d-1bb750ee38aa\") pod \"ovsdbserver-sb-2\" (UID: \"1fbe1234-296b-41ce-a03e-fde5b9abc505\") " pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.795858 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:33 crc kubenswrapper[4757]: I1006 15:06:33.868264 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:34 crc kubenswrapper[4757]: I1006 15:06:34.070353 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:34 crc kubenswrapper[4757]: I1006 15:06:34.125306 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 06 15:06:34 crc kubenswrapper[4757]: I1006 15:06:34.230186 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Oct 06 15:06:34 crc kubenswrapper[4757]: I1006 15:06:34.333967 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Oct 06 15:06:34 crc kubenswrapper[4757]: W1006 15:06:34.334537 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66e11e3d_a78e_4706_a789_a997c6f73a64.slice/crio-81da3861d1a2c2d0c03a6f61008b060e61541b5acfd658e0087b6d7fd61d440b WatchSource:0}: Error finding container 81da3861d1a2c2d0c03a6f61008b060e61541b5acfd658e0087b6d7fd61d440b: Status 404 returned error can't find the container with id 81da3861d1a2c2d0c03a6f61008b060e61541b5acfd658e0087b6d7fd61d440b
Oct 06 15:06:34 crc kubenswrapper[4757]: I1006 15:06:34.423549 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"]
Oct 06 15:06:34 crc kubenswrapper[4757]: I1006 15:06:34.632885 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Oct 06 15:06:34 crc kubenswrapper[4757]: W1006 15:06:34.640029 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe616da6_c336_4c71_b23c_3137a9a6c9ff.slice/crio-f0c9707ff79652fcf4b6c63a4586e2194f8046d07ac38c4c25807f394287448b WatchSource:0}: Error finding container f0c9707ff79652fcf4b6c63a4586e2194f8046d07ac38c4c25807f394287448b: Status 404 returned error can't find the container with id f0c9707ff79652fcf4b6c63a4586e2194f8046d07ac38c4c25807f394287448b
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.127951 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf","Type":"ContainerStarted","Data":"c98563bca02e31d46375ce308bc28c208c2d9f6ccd4854117edab663a57cb9b7"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.128307 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf","Type":"ContainerStarted","Data":"3491bf33318bf09e17f1aa9a16076c6fed056cd5dcf42242cff86f5c98cd03b5"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.128321 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"448d7cb5-8a9c-4430-acf9-e9d8662f9eaf","Type":"ContainerStarted","Data":"edf2092cd31acba49d543ddc3fa1b7211c585ce3c3f14ebf09f5e21d75a13f40"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.133638 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b","Type":"ContainerStarted","Data":"cf31d46ca5863c0c9ea0ac8b70f182873188cbab4536a938c1417174dd2f53ae"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.133779 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b","Type":"ContainerStarted","Data":"d00e817edbb10bb810d4e5a17961fcae76e543826546d7cf0da893ce3d1c201d"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.133861 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"65a44faf-fdf2-4ef5-9af8-6c31e0d9240b","Type":"ContainerStarted","Data":"4dec55c914b6316aae6740119f1e1287f20e698e59a79492f7e6769f5aac326d"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.137861 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"be616da6-c336-4c71-b23c-3137a9a6c9ff","Type":"ContainerStarted","Data":"5a31ffc1aa0f0f1f6700876b3a1be8a4b6d421e846c6dbf2072c4b7e1841ee0c"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.137911 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"be616da6-c336-4c71-b23c-3137a9a6c9ff","Type":"ContainerStarted","Data":"e9a64c3f71dab1914ed215d3be43e6025435434025a9cdcdf1678d8ac07e3e54"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.137929 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"be616da6-c336-4c71-b23c-3137a9a6c9ff","Type":"ContainerStarted","Data":"f0c9707ff79652fcf4b6c63a4586e2194f8046d07ac38c4c25807f394287448b"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.143918 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"95d03e2a-e649-41e0-bddb-9523672e5e9c","Type":"ContainerStarted","Data":"f1bb5e2d9f6af7d3633c6349cbbc702575946baf800d51c49b18d131831cfe2d"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.143965 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"95d03e2a-e649-41e0-bddb-9523672e5e9c","Type":"ContainerStarted","Data":"f76457a27cf18323fd64f70226f2bc43e3024a6f8c0bf563f0b0479539e35025"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.143981 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"95d03e2a-e649-41e0-bddb-9523672e5e9c","Type":"ContainerStarted","Data":"67f7ace6156168af6d9ac2188b3d80b0c1102d1fb5936712927d60224b2a3af8"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.150048 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"66e11e3d-a78e-4706-a789-a997c6f73a64","Type":"ContainerStarted","Data":"594024b22862a0425737731f71a4a3715e4557fac0c60fc1a47ee5b03f49696c"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.150127 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"66e11e3d-a78e-4706-a789-a997c6f73a64","Type":"ContainerStarted","Data":"4292d8e7e7931c250fe917dbe71eaf3361d29d0842cc1c2f30030b43f82dc3ed"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.150144 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"66e11e3d-a78e-4706-a789-a997c6f73a64","Type":"ContainerStarted","Data":"81da3861d1a2c2d0c03a6f61008b060e61541b5acfd658e0087b6d7fd61d440b"}
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.153178 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=3.153160691 podStartE2EDuration="3.153160691s" podCreationTimestamp="2025-10-06 15:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:06:35.145986069 +0000 UTC m=+5283.643304606" watchObservedRunningTime="2025-10-06 15:06:35.153160691 +0000 UTC m=+5283.650479228"
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.178374 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=3.178354456 podStartE2EDuration="3.178354456s" podCreationTimestamp="2025-10-06 15:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:06:35.174329205 +0000 UTC m=+5283.671647762" watchObservedRunningTime="2025-10-06 15:06:35.178354456 +0000 UTC m=+5283.675672993"
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.199486 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.199465069 podStartE2EDuration="3.199465069s" podCreationTimestamp="2025-10-06 15:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:06:35.19734476 +0000 UTC m=+5283.694663307" watchObservedRunningTime="2025-10-06 15:06:35.199465069 +0000 UTC m=+5283.696783616"
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.250529 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=3.25050491 podStartE2EDuration="3.25050491s" podCreationTimestamp="2025-10-06 15:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:06:35.228129046 +0000 UTC m=+5283.725447613" watchObservedRunningTime="2025-10-06 15:06:35.25050491 +0000 UTC m=+5283.747823447"
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.255141 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.255086497 podStartE2EDuration="3.255086497s" podCreationTimestamp="2025-10-06 15:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:06:35.246836011 +0000 UTC m=+5283.744154548" watchObservedRunningTime="2025-10-06 15:06:35.255086497 +0000 UTC m=+5283.752405044"
Oct 06 15:06:35 crc kubenswrapper[4757]: I1006 15:06:35.476475 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"]
Oct 06 15:06:35 crc kubenswrapper[4757]: W1006 15:06:35.486170 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fbe1234_296b_41ce_a03e_fde5b9abc505.slice/crio-4d6847bbce185f721eec22cc29e19dd597f89669daebdefd9fa750805741025c WatchSource:0}: Error finding container 4d6847bbce185f721eec22cc29e19dd597f89669daebdefd9fa750805741025c: Status 404 returned error can't find the container with id 4d6847bbce185f721eec22cc29e19dd597f89669daebdefd9fa750805741025c
Oct 06 15:06:36 crc kubenswrapper[4757]: I1006 15:06:36.161946 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"1fbe1234-296b-41ce-a03e-fde5b9abc505","Type":"ContainerStarted","Data":"acb1697be268c086b0ee4c30cdcec087ae0e21a2479d51e961fd9e7725223881"}
Oct 06 15:06:36 crc kubenswrapper[4757]: I1006 15:06:36.162003 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"1fbe1234-296b-41ce-a03e-fde5b9abc505","Type":"ContainerStarted","Data":"78447986cc2f1bfe717d550dae81fe7863540ad42999b198daf5f84b16b5a6e4"}
Oct 06 15:06:36 crc kubenswrapper[4757]: I1006 15:06:36.162023 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"1fbe1234-296b-41ce-a03e-fde5b9abc505","Type":"ContainerStarted","Data":"4d6847bbce185f721eec22cc29e19dd597f89669daebdefd9fa750805741025c"}
Oct 06 15:06:36 crc kubenswrapper[4757]: I1006 15:06:36.179132 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.179070014 podStartE2EDuration="4.179070014s" podCreationTimestamp="2025-10-06 15:06:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:06:36.178674602 +0000 UTC m=+5284.675993149" watchObservedRunningTime="2025-10-06 15:06:36.179070014 +0000 UTC m=+5284.676388551"
Oct 06 15:06:36 crc kubenswrapper[4757]: I1006 15:06:36.626819 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Oct 06 15:06:36 crc kubenswrapper[4757]: I1006 15:06:36.646198 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2"
Oct 06 15:06:36 crc kubenswrapper[4757]: I1006 15:06:36.670675 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1"
Oct 06 15:06:36 crc kubenswrapper[4757]: I1006 15:06:36.797209 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:36 crc kubenswrapper[4757]: I1006 15:06:36.870042 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:37 crc kubenswrapper[4757]: I1006 15:06:37.071863 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:37 crc kubenswrapper[4757]: I1006 15:06:37.117612 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:37 crc kubenswrapper[4757]: I1006 15:06:37.170859 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:38 crc kubenswrapper[4757]: I1006 15:06:38.627024 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Oct 06 15:06:38 crc kubenswrapper[4757]: I1006 15:06:38.646289 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2"
Oct 06 15:06:38 crc kubenswrapper[4757]: I1006 15:06:38.670951 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1"
Oct 06 15:06:38 crc kubenswrapper[4757]: I1006 15:06:38.798286 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:38 crc kubenswrapper[4757]: I1006 15:06:38.869774 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.114651 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.411577 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5994bf9-xqjrh"]
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.413141 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.415763 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.436739 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5994bf9-xqjrh"]
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.496591 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8br9\" (UniqueName: \"kubernetes.io/projected/6a34f4ab-4af0-45d4-953d-69566078091b-kube-api-access-q8br9\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.496976 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-config\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.497004 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-dns-svc\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.497055 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-ovsdbserver-sb\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.598557 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8br9\" (UniqueName: \"kubernetes.io/projected/6a34f4ab-4af0-45d4-953d-69566078091b-kube-api-access-q8br9\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.598648 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-config\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.598672 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-dns-svc\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.598711 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-ovsdbserver-sb\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.599639 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-ovsdbserver-sb\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.599639 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-config\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.599953 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-dns-svc\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.616179 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8br9\" (UniqueName: \"kubernetes.io/projected/6a34f4ab-4af0-45d4-953d-69566078091b-kube-api-access-q8br9\") pod \"dnsmasq-dns-7cb5994bf9-xqjrh\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") " pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.673511 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.688391 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.730556 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.730915 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.731995 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.736478 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.794326 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.861103 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.943419 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1"
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.962071 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5994bf9-xqjrh"]
Oct 06 15:06:39 crc kubenswrapper[4757]: I1006 15:06:39.982942 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.005911 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79d994df79-65xps"]
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.007773 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.010613 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.016579 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79d994df79-65xps"]
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.111289 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-config\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.111364 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-nb\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.111527 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-sb\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.111555 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnbgl\" (UniqueName: \"kubernetes.io/projected/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-kube-api-access-hnbgl\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.111576 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-dns-svc\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.213107 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-sb\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.213154 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnbgl\" (UniqueName: \"kubernetes.io/projected/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-kube-api-access-hnbgl\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.213175 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-dns-svc\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.213208 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-config\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.213234 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-nb\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.214957 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-nb\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.215321 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-config\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.215976 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-sb\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.216059 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-dns-svc\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.232285 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnbgl\" (UniqueName: \"kubernetes.io/projected/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-kube-api-access-hnbgl\") pod \"dnsmasq-dns-79d994df79-65xps\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.242001 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.291895 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5994bf9-xqjrh"]
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.329960 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:40 crc kubenswrapper[4757]: I1006 15:06:40.746715 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79d994df79-65xps"]
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.211119 4757 generic.go:334] "Generic (PLEG): container finished" podID="1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" containerID="2ffc14a7f5c29f6aea5c4d0a5ed621ca9d88bc4bb638278c4c9c5ca93a0da6d3" exitCode=0
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.211255 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79d994df79-65xps" event={"ID":"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a","Type":"ContainerDied","Data":"2ffc14a7f5c29f6aea5c4d0a5ed621ca9d88bc4bb638278c4c9c5ca93a0da6d3"}
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.211596 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79d994df79-65xps" event={"ID":"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a","Type":"ContainerStarted","Data":"5645da8190e1d569cbf9429c4320db3db2b3d5799370fe08b22455028331f40f"}
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.213488 4757 generic.go:334] "Generic (PLEG): container finished" podID="6a34f4ab-4af0-45d4-953d-69566078091b" containerID="e30691475ada4e4f271f652ca681ae23772050d34d42158d78d17f1c312d8bc9" exitCode=0
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.213521 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh" event={"ID":"6a34f4ab-4af0-45d4-953d-69566078091b","Type":"ContainerDied","Data":"e30691475ada4e4f271f652ca681ae23772050d34d42158d78d17f1c312d8bc9"}
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.213761 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh" event={"ID":"6a34f4ab-4af0-45d4-953d-69566078091b","Type":"ContainerStarted","Data":"2dd58dd1a4296f0ac44326c9c3648e7d4d55c3020da283439c6958ee54a29895"}
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.479955 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.640118 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8br9\" (UniqueName: \"kubernetes.io/projected/6a34f4ab-4af0-45d4-953d-69566078091b-kube-api-access-q8br9\") pod \"6a34f4ab-4af0-45d4-953d-69566078091b\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") "
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.640213 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-ovsdbserver-sb\") pod \"6a34f4ab-4af0-45d4-953d-69566078091b\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") "
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.640235 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-config\") pod \"6a34f4ab-4af0-45d4-953d-69566078091b\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") "
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.640297 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-dns-svc\") pod \"6a34f4ab-4af0-45d4-953d-69566078091b\" (UID: \"6a34f4ab-4af0-45d4-953d-69566078091b\") "
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.651530 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a34f4ab-4af0-45d4-953d-69566078091b-kube-api-access-q8br9" (OuterVolumeSpecName: "kube-api-access-q8br9") pod "6a34f4ab-4af0-45d4-953d-69566078091b" (UID: "6a34f4ab-4af0-45d4-953d-69566078091b"). InnerVolumeSpecName "kube-api-access-q8br9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.661533 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6a34f4ab-4af0-45d4-953d-69566078091b" (UID: "6a34f4ab-4af0-45d4-953d-69566078091b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.668162 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6a34f4ab-4af0-45d4-953d-69566078091b" (UID: "6a34f4ab-4af0-45d4-953d-69566078091b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.671322 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-config" (OuterVolumeSpecName: "config") pod "6a34f4ab-4af0-45d4-953d-69566078091b" (UID: "6a34f4ab-4af0-45d4-953d-69566078091b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.742560 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8br9\" (UniqueName: \"kubernetes.io/projected/6a34f4ab-4af0-45d4-953d-69566078091b-kube-api-access-q8br9\") on node \"crc\" DevicePath \"\""
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.742622 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.742641 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-config\") on node \"crc\" DevicePath \"\""
Oct 06 15:06:41 crc kubenswrapper[4757]: I1006 15:06:41.742656 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6a34f4ab-4af0-45d4-953d-69566078091b-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.221984 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79d994df79-65xps" event={"ID":"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a","Type":"ContainerStarted","Data":"d2d295724c333bdd2fc3e8a7f7b3afafb384048883cb1085c44fa09082c0c7cc"}
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.222901 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.225223 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.225163 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5994bf9-xqjrh" event={"ID":"6a34f4ab-4af0-45d4-953d-69566078091b","Type":"ContainerDied","Data":"2dd58dd1a4296f0ac44326c9c3648e7d4d55c3020da283439c6958ee54a29895"}
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.225721 4757 scope.go:117] "RemoveContainer" containerID="e30691475ada4e4f271f652ca681ae23772050d34d42158d78d17f1c312d8bc9"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.246256 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79d994df79-65xps" podStartSLOduration=3.246237156 podStartE2EDuration="3.246237156s" podCreationTimestamp="2025-10-06 15:06:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:06:42.241287236 +0000 UTC m=+5290.738605773" watchObservedRunningTime="2025-10-06 15:06:42.246237156 +0000 UTC m=+5290.743555693"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.324965 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5994bf9-xqjrh"]
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.331722 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5994bf9-xqjrh"]
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.646762 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"]
Oct 06 15:06:42 crc kubenswrapper[4757]: E1006 15:06:42.647120 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a34f4ab-4af0-45d4-953d-69566078091b" containerName="init"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.647136 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a34f4ab-4af0-45d4-953d-69566078091b" containerName="init"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.647308 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a34f4ab-4af0-45d4-953d-69566078091b" containerName="init"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.647855 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.652591 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.652738 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.759495 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n92ql\" (UniqueName: \"kubernetes.io/projected/5a471640-27f0-4f88-8488-fae3ef555c54-kube-api-access-n92ql\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") " pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.759532 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/5a471640-27f0-4f88-8488-fae3ef555c54-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") " pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.759978 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-70f0534c-c84d-4ca6-98a7-35f1ffcde5ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70f0534c-c84d-4ca6-98a7-35f1ffcde5ad\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") " pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.861124 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-70f0534c-c84d-4ca6-98a7-35f1ffcde5ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70f0534c-c84d-4ca6-98a7-35f1ffcde5ad\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") " pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.861211 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n92ql\" (UniqueName: \"kubernetes.io/projected/5a471640-27f0-4f88-8488-fae3ef555c54-kube-api-access-n92ql\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") " pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.861241 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/5a471640-27f0-4f88-8488-fae3ef555c54-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") " pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.862752 4757 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.862778 4757 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-70f0534c-c84d-4ca6-98a7-35f1ffcde5ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70f0534c-c84d-4ca6-98a7-35f1ffcde5ad\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5b0476b371193c5e5b2b4d210d89142b9444e118720961e168e1f9cda091c251/globalmount\"" pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.865288 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/5a471640-27f0-4f88-8488-fae3ef555c54-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") " pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.876842 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n92ql\" (UniqueName: \"kubernetes.io/projected/5a471640-27f0-4f88-8488-fae3ef555c54-kube-api-access-n92ql\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") " pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.893995 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-70f0534c-c84d-4ca6-98a7-35f1ffcde5ad\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70f0534c-c84d-4ca6-98a7-35f1ffcde5ad\") pod \"ovn-copy-data\" (UID: \"5a471640-27f0-4f88-8488-fae3ef555c54\") " pod="openstack/ovn-copy-data"
Oct 06 15:06:42 crc kubenswrapper[4757]: I1006 15:06:42.973425 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Oct 06 15:06:43 crc kubenswrapper[4757]: I1006 15:06:43.458269 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Oct 06 15:06:44 crc kubenswrapper[4757]: I1006 15:06:44.181030 4757 scope.go:117] "RemoveContainer" containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125"
Oct 06 15:06:44 crc kubenswrapper[4757]: I1006 15:06:44.193991 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a34f4ab-4af0-45d4-953d-69566078091b" path="/var/lib/kubelet/pods/6a34f4ab-4af0-45d4-953d-69566078091b/volumes"
Oct 06 15:06:44 crc kubenswrapper[4757]: I1006 15:06:44.245443 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"5a471640-27f0-4f88-8488-fae3ef555c54","Type":"ContainerStarted","Data":"e4772d76561037e1b9f071d9af73059bcc11d0257e0460d7e1408ffb5ae60bea"}
Oct 06 15:06:45 crc kubenswrapper[4757]: I1006 15:06:45.258830 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"6ada3739de20b08be50b8e188471c034ef69b5ce1559383ec174465e8517cd1b"}
Oct 06 15:06:47 crc kubenswrapper[4757]: I1006 15:06:47.287250 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"5a471640-27f0-4f88-8488-fae3ef555c54","Type":"ContainerStarted","Data":"2852e1cbc9859fe4d7ce08a02d46c7a8a7f397b117a0ba6f00d9e19aafb9adf4"}
Oct 06 15:06:47 crc kubenswrapper[4757]: I1006 15:06:47.312503 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=3.568737658 podStartE2EDuration="6.312467053s" podCreationTimestamp="2025-10-06 15:06:41 +0000 UTC" firstStartedPulling="2025-10-06 15:06:43.466517966 +0000 UTC m=+5291.963836503" lastFinishedPulling="2025-10-06 15:06:46.210247361 +0000 UTC m=+5294.707565898" observedRunningTime="2025-10-06 15:06:47.309272919 +0000 UTC m=+5295.806591526" watchObservedRunningTime="2025-10-06 15:06:47.312467053 +0000 UTC m=+5295.809785600"
Oct 06 15:06:50 crc kubenswrapper[4757]: I1006 15:06:50.331297 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79d994df79-65xps"
Oct 06 15:06:50 crc kubenswrapper[4757]: I1006 15:06:50.380675 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b9989c9f7-4zdvt"]
Oct 06 15:06:50 crc kubenswrapper[4757]: I1006 15:06:50.380938 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" podUID="2bd44821-3471-4f95-a1b3-9d751852ed32" containerName="dnsmasq-dns" containerID="cri-o://e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3" gracePeriod=10
Oct 06 15:06:50 crc kubenswrapper[4757]: I1006 15:06:50.945631 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.022783 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-dns-svc\") pod \"2bd44821-3471-4f95-a1b3-9d751852ed32\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.022817 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cx9g\" (UniqueName: \"kubernetes.io/projected/2bd44821-3471-4f95-a1b3-9d751852ed32-kube-api-access-8cx9g\") pod \"2bd44821-3471-4f95-a1b3-9d751852ed32\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.022841 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-config\") pod \"2bd44821-3471-4f95-a1b3-9d751852ed32\" (UID: \"2bd44821-3471-4f95-a1b3-9d751852ed32\") " Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.028815 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bd44821-3471-4f95-a1b3-9d751852ed32-kube-api-access-8cx9g" (OuterVolumeSpecName: "kube-api-access-8cx9g") pod "2bd44821-3471-4f95-a1b3-9d751852ed32" (UID: "2bd44821-3471-4f95-a1b3-9d751852ed32"). InnerVolumeSpecName "kube-api-access-8cx9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.064553 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2bd44821-3471-4f95-a1b3-9d751852ed32" (UID: "2bd44821-3471-4f95-a1b3-9d751852ed32"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.079658 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-config" (OuterVolumeSpecName: "config") pod "2bd44821-3471-4f95-a1b3-9d751852ed32" (UID: "2bd44821-3471-4f95-a1b3-9d751852ed32"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.124780 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.124822 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cx9g\" (UniqueName: \"kubernetes.io/projected/2bd44821-3471-4f95-a1b3-9d751852ed32-kube-api-access-8cx9g\") on node \"crc\" DevicePath \"\"" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.124834 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2bd44821-3471-4f95-a1b3-9d751852ed32-config\") on node \"crc\" DevicePath \"\"" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.326267 4757 generic.go:334] "Generic (PLEG): container finished" podID="2bd44821-3471-4f95-a1b3-9d751852ed32" containerID="e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3" exitCode=0 Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.326310 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" event={"ID":"2bd44821-3471-4f95-a1b3-9d751852ed32","Type":"ContainerDied","Data":"e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3"} Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.326336 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.326362 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b9989c9f7-4zdvt" event={"ID":"2bd44821-3471-4f95-a1b3-9d751852ed32","Type":"ContainerDied","Data":"53c740f60446a338aaaab1e01c7b7be28b0270438fea9a27426c9fd6cda1ff2c"} Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.326387 4757 scope.go:117] "RemoveContainer" containerID="e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.358140 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b9989c9f7-4zdvt"] Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.358206 4757 scope.go:117] "RemoveContainer" containerID="eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.364227 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b9989c9f7-4zdvt"] Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.397342 4757 scope.go:117] "RemoveContainer" containerID="e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3" Oct 06 15:06:51 crc kubenswrapper[4757]: E1006 15:06:51.398000 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3\": container with ID starting with e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3 not found: ID does not exist" containerID="e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.398034 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3"} err="failed to get container status 
\"e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3\": rpc error: code = NotFound desc = could not find container \"e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3\": container with ID starting with e0d7c08f91f065414abc28e45c78157f2e847ef09cf99a5af832bd0047a7dfd3 not found: ID does not exist" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.398059 4757 scope.go:117] "RemoveContainer" containerID="eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea" Oct 06 15:06:51 crc kubenswrapper[4757]: E1006 15:06:51.398543 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea\": container with ID starting with eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea not found: ID does not exist" containerID="eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.398573 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea"} err="failed to get container status \"eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea\": rpc error: code = NotFound desc = could not find container \"eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea\": container with ID starting with eb410850917fa50cbd0546a549dd4d800303f4f32397d400df27145058e3efea not found: ID does not exist" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.998943 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 06 15:06:51 crc kubenswrapper[4757]: E1006 15:06:51.999372 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bd44821-3471-4f95-a1b3-9d751852ed32" containerName="init" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.999395 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bd44821-3471-4f95-a1b3-9d751852ed32" containerName="init" Oct 06 15:06:51 crc kubenswrapper[4757]: E1006 15:06:51.999416 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bd44821-3471-4f95-a1b3-9d751852ed32" containerName="dnsmasq-dns" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.999423 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bd44821-3471-4f95-a1b3-9d751852ed32" containerName="dnsmasq-dns" Oct 06 15:06:51 crc kubenswrapper[4757]: I1006 15:06:51.999620 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bd44821-3471-4f95-a1b3-9d751852ed32" containerName="dnsmasq-dns" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.000712 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.004066 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.004154 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-p79kv" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.004512 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.004532 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.024133 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.140156 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1514d2be-d117-4239-bb79-e73114d631fa-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.140215 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zs4k\" (UniqueName: \"kubernetes.io/projected/1514d2be-d117-4239-bb79-e73114d631fa-kube-api-access-5zs4k\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.140237 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1514d2be-d117-4239-bb79-e73114d631fa-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.140262 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1514d2be-d117-4239-bb79-e73114d631fa-scripts\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.140301 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1514d2be-d117-4239-bb79-e73114d631fa-config\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.140498 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1514d2be-d117-4239-bb79-e73114d631fa-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.140573 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1514d2be-d117-4239-bb79-e73114d631fa-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: 
I1006 15:06:52.190227 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bd44821-3471-4f95-a1b3-9d751852ed32" path="/var/lib/kubelet/pods/2bd44821-3471-4f95-a1b3-9d751852ed32/volumes" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.242520 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1514d2be-d117-4239-bb79-e73114d631fa-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.242576 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zs4k\" (UniqueName: \"kubernetes.io/projected/1514d2be-d117-4239-bb79-e73114d631fa-kube-api-access-5zs4k\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.242596 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1514d2be-d117-4239-bb79-e73114d631fa-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.242622 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1514d2be-d117-4239-bb79-e73114d631fa-scripts\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.242640 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1514d2be-d117-4239-bb79-e73114d631fa-config\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.242667 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1514d2be-d117-4239-bb79-e73114d631fa-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.242684 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1514d2be-d117-4239-bb79-e73114d631fa-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.243605 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1514d2be-d117-4239-bb79-e73114d631fa-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.244041 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1514d2be-d117-4239-bb79-e73114d631fa-scripts\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.244368 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1514d2be-d117-4239-bb79-e73114d631fa-config\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.247738 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/1514d2be-d117-4239-bb79-e73114d631fa-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.247768 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1514d2be-d117-4239-bb79-e73114d631fa-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.249778 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1514d2be-d117-4239-bb79-e73114d631fa-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.260831 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zs4k\" (UniqueName: \"kubernetes.io/projected/1514d2be-d117-4239-bb79-e73114d631fa-kube-api-access-5zs4k\") pod \"ovn-northd-0\" (UID: \"1514d2be-d117-4239-bb79-e73114d631fa\") " pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.323294 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Oct 06 15:06:52 crc kubenswrapper[4757]: I1006 15:06:52.779502 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 06 15:06:52 crc kubenswrapper[4757]: W1006 15:06:52.784791 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1514d2be_d117_4239_bb79_e73114d631fa.slice/crio-1faa0a712a65a4b5a0a386823ac8f838a7aebb03e898b363a6a48da4efc1ce36 WatchSource:0}: Error finding container 1faa0a712a65a4b5a0a386823ac8f838a7aebb03e898b363a6a48da4efc1ce36: Status 404 returned error can't find the container with id 1faa0a712a65a4b5a0a386823ac8f838a7aebb03e898b363a6a48da4efc1ce36 Oct 06 15:06:53 crc kubenswrapper[4757]: I1006 15:06:53.345680 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1514d2be-d117-4239-bb79-e73114d631fa","Type":"ContainerStarted","Data":"b2eaa8ffaeb4801ed7517efb44a8de66e18908e3cc5379a84b09c5a3592a72e3"} Oct 06 15:06:53 crc kubenswrapper[4757]: I1006 15:06:53.345962 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1514d2be-d117-4239-bb79-e73114d631fa","Type":"ContainerStarted","Data":"2b172d99ed0e89ea9c83ac39588e74cfd57388cc4792acf2f4371ff1f1bb03bd"} Oct 06 15:06:53 crc kubenswrapper[4757]: I1006 15:06:53.345971 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"1514d2be-d117-4239-bb79-e73114d631fa","Type":"ContainerStarted","Data":"1faa0a712a65a4b5a0a386823ac8f838a7aebb03e898b363a6a48da4efc1ce36"} Oct 06 15:06:53 crc kubenswrapper[4757]: I1006 15:06:53.346282 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 06 15:06:53 crc 
kubenswrapper[4757]: I1006 15:06:53.373378 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.373351841 podStartE2EDuration="2.373351841s" podCreationTimestamp="2025-10-06 15:06:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:06:53.369584469 +0000 UTC m=+5301.866903026" watchObservedRunningTime="2025-10-06 15:06:53.373351841 +0000 UTC m=+5301.870670388" Oct 06 15:06:57 crc kubenswrapper[4757]: I1006 15:06:57.174327 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-phk2f"] Oct 06 15:06:57 crc kubenswrapper[4757]: I1006 15:06:57.175998 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-phk2f" Oct 06 15:06:57 crc kubenswrapper[4757]: I1006 15:06:57.181750 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-phk2f"] Oct 06 15:06:57 crc kubenswrapper[4757]: I1006 15:06:57.328560 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtzr2\" (UniqueName: \"kubernetes.io/projected/e6d9f5c5-06c1-4fdf-99be-31cd1bc00394-kube-api-access-wtzr2\") pod \"keystone-db-create-phk2f\" (UID: \"e6d9f5c5-06c1-4fdf-99be-31cd1bc00394\") " pod="openstack/keystone-db-create-phk2f" Oct 06 15:06:57 crc kubenswrapper[4757]: I1006 15:06:57.430434 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtzr2\" (UniqueName: \"kubernetes.io/projected/e6d9f5c5-06c1-4fdf-99be-31cd1bc00394-kube-api-access-wtzr2\") pod \"keystone-db-create-phk2f\" (UID: \"e6d9f5c5-06c1-4fdf-99be-31cd1bc00394\") " pod="openstack/keystone-db-create-phk2f" Oct 06 15:06:57 crc kubenswrapper[4757]: I1006 15:06:57.452136 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtzr2\" (UniqueName: \"kubernetes.io/projected/e6d9f5c5-06c1-4fdf-99be-31cd1bc00394-kube-api-access-wtzr2\") pod \"keystone-db-create-phk2f\" (UID: \"e6d9f5c5-06c1-4fdf-99be-31cd1bc00394\") " pod="openstack/keystone-db-create-phk2f" Oct 06 15:06:57 crc kubenswrapper[4757]: I1006 15:06:57.502589 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-phk2f" Oct 06 15:06:58 crc kubenswrapper[4757]: I1006 15:06:58.060634 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-phk2f"] Oct 06 15:06:58 crc kubenswrapper[4757]: I1006 15:06:58.391887 4757 generic.go:334] "Generic (PLEG): container finished" podID="e6d9f5c5-06c1-4fdf-99be-31cd1bc00394" containerID="b04fd2f1c41fba26a5362c94e6ce7af9a8cc429a99d19aad799e2c6e7650f018" exitCode=0 Oct 06 15:06:58 crc kubenswrapper[4757]: I1006 15:06:58.392137 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-phk2f" event={"ID":"e6d9f5c5-06c1-4fdf-99be-31cd1bc00394","Type":"ContainerDied","Data":"b04fd2f1c41fba26a5362c94e6ce7af9a8cc429a99d19aad799e2c6e7650f018"} Oct 06 15:06:58 crc kubenswrapper[4757]: I1006 15:06:58.392227 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-phk2f" event={"ID":"e6d9f5c5-06c1-4fdf-99be-31cd1bc00394","Type":"ContainerStarted","Data":"b6545f353305349b403ec25fd100c2bce60933bdc9775fb97aff94d3ba4e2340"} Oct 06 15:06:59 crc kubenswrapper[4757]: I1006 15:06:59.732964 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-phk2f" Oct 06 15:06:59 crc kubenswrapper[4757]: I1006 15:06:59.875035 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtzr2\" (UniqueName: \"kubernetes.io/projected/e6d9f5c5-06c1-4fdf-99be-31cd1bc00394-kube-api-access-wtzr2\") pod \"e6d9f5c5-06c1-4fdf-99be-31cd1bc00394\" (UID: \"e6d9f5c5-06c1-4fdf-99be-31cd1bc00394\") " Oct 06 15:06:59 crc kubenswrapper[4757]: I1006 15:06:59.882520 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6d9f5c5-06c1-4fdf-99be-31cd1bc00394-kube-api-access-wtzr2" (OuterVolumeSpecName: "kube-api-access-wtzr2") pod "e6d9f5c5-06c1-4fdf-99be-31cd1bc00394" (UID: "e6d9f5c5-06c1-4fdf-99be-31cd1bc00394"). InnerVolumeSpecName "kube-api-access-wtzr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:06:59 crc kubenswrapper[4757]: I1006 15:06:59.977011 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtzr2\" (UniqueName: \"kubernetes.io/projected/e6d9f5c5-06c1-4fdf-99be-31cd1bc00394-kube-api-access-wtzr2\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:00 crc kubenswrapper[4757]: I1006 15:07:00.413975 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-phk2f" event={"ID":"e6d9f5c5-06c1-4fdf-99be-31cd1bc00394","Type":"ContainerDied","Data":"b6545f353305349b403ec25fd100c2bce60933bdc9775fb97aff94d3ba4e2340"} Oct 06 15:07:00 crc kubenswrapper[4757]: I1006 15:07:00.414411 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6545f353305349b403ec25fd100c2bce60933bdc9775fb97aff94d3ba4e2340" Oct 06 15:07:00 crc kubenswrapper[4757]: I1006 15:07:00.414070 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-phk2f" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.272536 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-b3d9-account-create-sfxqf"] Oct 06 15:07:07 crc kubenswrapper[4757]: E1006 15:07:07.273763 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6d9f5c5-06c1-4fdf-99be-31cd1bc00394" containerName="mariadb-database-create" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.273784 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6d9f5c5-06c1-4fdf-99be-31cd1bc00394" containerName="mariadb-database-create" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.274039 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6d9f5c5-06c1-4fdf-99be-31cd1bc00394" containerName="mariadb-database-create" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.274900 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b3d9-account-create-sfxqf" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.278114 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.289603 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b3d9-account-create-sfxqf"] Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.380212 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.412947 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb7xj\" (UniqueName: \"kubernetes.io/projected/698f058d-bce6-40a2-ad66-9cb2a8990e42-kube-api-access-gb7xj\") pod \"keystone-b3d9-account-create-sfxqf\" (UID: \"698f058d-bce6-40a2-ad66-9cb2a8990e42\") " pod="openstack/keystone-b3d9-account-create-sfxqf" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.515010 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb7xj\" (UniqueName: \"kubernetes.io/projected/698f058d-bce6-40a2-ad66-9cb2a8990e42-kube-api-access-gb7xj\") pod \"keystone-b3d9-account-create-sfxqf\" (UID: \"698f058d-bce6-40a2-ad66-9cb2a8990e42\") " pod="openstack/keystone-b3d9-account-create-sfxqf" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.532789 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb7xj\" (UniqueName: \"kubernetes.io/projected/698f058d-bce6-40a2-ad66-9cb2a8990e42-kube-api-access-gb7xj\") pod \"keystone-b3d9-account-create-sfxqf\" (UID: \"698f058d-bce6-40a2-ad66-9cb2a8990e42\") " pod="openstack/keystone-b3d9-account-create-sfxqf" Oct 06 15:07:07 crc kubenswrapper[4757]: I1006 15:07:07.594228 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-b3d9-account-create-sfxqf" Oct 06 15:07:08 crc kubenswrapper[4757]: I1006 15:07:08.051228 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b3d9-account-create-sfxqf"] Oct 06 15:07:08 crc kubenswrapper[4757]: I1006 15:07:08.486792 4757 generic.go:334] "Generic (PLEG): container finished" podID="698f058d-bce6-40a2-ad66-9cb2a8990e42" containerID="0e985ccff7137a04b31b7e56062fd78965b722a1f2459cb5aa58ee1aa0d49be6" exitCode=0 Oct 06 15:07:08 crc kubenswrapper[4757]: I1006 15:07:08.486853 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b3d9-account-create-sfxqf" event={"ID":"698f058d-bce6-40a2-ad66-9cb2a8990e42","Type":"ContainerDied","Data":"0e985ccff7137a04b31b7e56062fd78965b722a1f2459cb5aa58ee1aa0d49be6"} Oct 06 15:07:08 crc kubenswrapper[4757]: I1006 15:07:08.486893 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b3d9-account-create-sfxqf" event={"ID":"698f058d-bce6-40a2-ad66-9cb2a8990e42","Type":"ContainerStarted","Data":"730b9c31e958a9ed39f68870f69d150c3e7ea9e04cde2e78f23acbf0ffb0e2ce"} Oct 06 15:07:09 crc kubenswrapper[4757]: I1006 15:07:09.866765 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b3d9-account-create-sfxqf" Oct 06 15:07:09 crc kubenswrapper[4757]: I1006 15:07:09.962912 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb7xj\" (UniqueName: \"kubernetes.io/projected/698f058d-bce6-40a2-ad66-9cb2a8990e42-kube-api-access-gb7xj\") pod \"698f058d-bce6-40a2-ad66-9cb2a8990e42\" (UID: \"698f058d-bce6-40a2-ad66-9cb2a8990e42\") " Oct 06 15:07:09 crc kubenswrapper[4757]: I1006 15:07:09.971370 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/698f058d-bce6-40a2-ad66-9cb2a8990e42-kube-api-access-gb7xj" (OuterVolumeSpecName: "kube-api-access-gb7xj") pod "698f058d-bce6-40a2-ad66-9cb2a8990e42" (UID: "698f058d-bce6-40a2-ad66-9cb2a8990e42"). InnerVolumeSpecName "kube-api-access-gb7xj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:07:10 crc kubenswrapper[4757]: I1006 15:07:10.067640 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb7xj\" (UniqueName: \"kubernetes.io/projected/698f058d-bce6-40a2-ad66-9cb2a8990e42-kube-api-access-gb7xj\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:10 crc kubenswrapper[4757]: I1006 15:07:10.507041 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b3d9-account-create-sfxqf" event={"ID":"698f058d-bce6-40a2-ad66-9cb2a8990e42","Type":"ContainerDied","Data":"730b9c31e958a9ed39f68870f69d150c3e7ea9e04cde2e78f23acbf0ffb0e2ce"} Oct 06 15:07:10 crc kubenswrapper[4757]: I1006 15:07:10.507119 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="730b9c31e958a9ed39f68870f69d150c3e7ea9e04cde2e78f23acbf0ffb0e2ce" Oct 06 15:07:10 crc kubenswrapper[4757]: I1006 15:07:10.507190 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-b3d9-account-create-sfxqf" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.671623 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-qmc6b"] Oct 06 15:07:12 crc kubenswrapper[4757]: E1006 15:07:12.672744 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="698f058d-bce6-40a2-ad66-9cb2a8990e42" containerName="mariadb-account-create" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.672763 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="698f058d-bce6-40a2-ad66-9cb2a8990e42" containerName="mariadb-account-create" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.672955 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="698f058d-bce6-40a2-ad66-9cb2a8990e42" containerName="mariadb-account-create" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.673550 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.680424 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.680979 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.681028 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.680996 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8hbt2" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.693183 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qmc6b"] Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.819133 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r87hl\" (UniqueName: \"kubernetes.io/projected/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-kube-api-access-r87hl\") pod \"keystone-db-sync-qmc6b\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.819194 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-config-data\") pod \"keystone-db-sync-qmc6b\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.819260 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-combined-ca-bundle\") pod \"keystone-db-sync-qmc6b\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.920593 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-combined-ca-bundle\") pod \"keystone-db-sync-qmc6b\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.920842 4757 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-r87hl\" (UniqueName: \"kubernetes.io/projected/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-kube-api-access-r87hl\") pod \"keystone-db-sync-qmc6b\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.921041 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-config-data\") pod \"keystone-db-sync-qmc6b\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.927648 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-combined-ca-bundle\") pod \"keystone-db-sync-qmc6b\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.929938 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-config-data\") pod \"keystone-db-sync-qmc6b\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.939815 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r87hl\" (UniqueName: \"kubernetes.io/projected/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-kube-api-access-r87hl\") pod \"keystone-db-sync-qmc6b\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:12 crc kubenswrapper[4757]: I1006 15:07:12.994894 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:13 crc kubenswrapper[4757]: I1006 15:07:13.459283 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-qmc6b"] Oct 06 15:07:13 crc kubenswrapper[4757]: I1006 15:07:13.536274 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qmc6b" event={"ID":"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9","Type":"ContainerStarted","Data":"fa9f52f7944af2d11db5f9a3d7be678d399b582136edd11cf8c3f1db84478421"} Oct 06 15:07:14 crc kubenswrapper[4757]: I1006 15:07:14.550619 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qmc6b" event={"ID":"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9","Type":"ContainerStarted","Data":"201963f22ea1299ade21109b13ea451618acaa4fcf5810a182449a48b1b1823e"} Oct 06 15:07:14 crc kubenswrapper[4757]: I1006 15:07:14.583489 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-qmc6b" podStartSLOduration=2.583466769 podStartE2EDuration="2.583466769s" podCreationTimestamp="2025-10-06 15:07:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:07:14.580285017 +0000 UTC m=+5323.077603544" watchObservedRunningTime="2025-10-06 15:07:14.583466769 +0000 UTC m=+5323.080785326" Oct 06 15:07:15 crc kubenswrapper[4757]: I1006 15:07:15.563064 4757 generic.go:334] "Generic (PLEG): container finished" podID="8a6135bf-bb76-48a9-88a5-b6271ebc3bf9" containerID="201963f22ea1299ade21109b13ea451618acaa4fcf5810a182449a48b1b1823e" exitCode=0 Oct 06 15:07:15 crc kubenswrapper[4757]: I1006 15:07:15.563172 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qmc6b" event={"ID":"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9","Type":"ContainerDied","Data":"201963f22ea1299ade21109b13ea451618acaa4fcf5810a182449a48b1b1823e"} Oct 06 15:07:16 crc kubenswrapper[4757]: I1006 15:07:16.952069 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.091432 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-combined-ca-bundle\") pod \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.091536 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r87hl\" (UniqueName: \"kubernetes.io/projected/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-kube-api-access-r87hl\") pod \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.091616 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-config-data\") pod \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\" (UID: \"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9\") " Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.096954 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-kube-api-access-r87hl" (OuterVolumeSpecName: "kube-api-access-r87hl") pod "8a6135bf-bb76-48a9-88a5-b6271ebc3bf9" (UID: "8a6135bf-bb76-48a9-88a5-b6271ebc3bf9"). InnerVolumeSpecName "kube-api-access-r87hl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.121709 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a6135bf-bb76-48a9-88a5-b6271ebc3bf9" (UID: "8a6135bf-bb76-48a9-88a5-b6271ebc3bf9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.138280 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-config-data" (OuterVolumeSpecName: "config-data") pod "8a6135bf-bb76-48a9-88a5-b6271ebc3bf9" (UID: "8a6135bf-bb76-48a9-88a5-b6271ebc3bf9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.194183 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.194232 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.194245 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r87hl\" (UniqueName: \"kubernetes.io/projected/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9-kube-api-access-r87hl\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.588216 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-qmc6b" event={"ID":"8a6135bf-bb76-48a9-88a5-b6271ebc3bf9","Type":"ContainerDied","Data":"fa9f52f7944af2d11db5f9a3d7be678d399b582136edd11cf8c3f1db84478421"} Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.588267 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa9f52f7944af2d11db5f9a3d7be678d399b582136edd11cf8c3f1db84478421" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.588304 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-qmc6b" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.830754 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cf9d54b65-rdpsv"] Oct 06 15:07:17 crc kubenswrapper[4757]: E1006 15:07:17.831654 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a6135bf-bb76-48a9-88a5-b6271ebc3bf9" containerName="keystone-db-sync" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.831678 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a6135bf-bb76-48a9-88a5-b6271ebc3bf9" containerName="keystone-db-sync" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.831917 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a6135bf-bb76-48a9-88a5-b6271ebc3bf9" containerName="keystone-db-sync" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.833238 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.850613 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf9d54b65-rdpsv"] Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.888506 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-txzc6"] Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.889978 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.892078 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.892396 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.892430 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.892573 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8hbt2" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.896436 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-txzc6"] Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.906362 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-nb\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.906478 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-config\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.906523 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-sb\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.906575 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4php\" (UniqueName: \"kubernetes.io/projected/8c99d116-9378-471e-a119-1a468f151a77-kube-api-access-p4php\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:17 crc kubenswrapper[4757]: I1006 15:07:17.906603 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-dns-svc\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.008563 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-sb\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.008626 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-combined-ca-bundle\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.008674 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4php\" (UniqueName: \"kubernetes.io/projected/8c99d116-9378-471e-a119-1a468f151a77-kube-api-access-p4php\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.008779 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-dns-svc\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.008910 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-fernet-keys\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.008941 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-credential-keys\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.009063 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-nb\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.009274 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-config-data\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.009322 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-scripts\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.009381 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-config\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.009464 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr7r6\" (UniqueName: 
\"kubernetes.io/projected/9756fa47-5175-400c-88f1-1129d417c1f8-kube-api-access-qr7r6\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.009755 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-sb\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.009797 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-nb\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.009806 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-dns-svc\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.010156 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-config\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.027632 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4php\" (UniqueName: \"kubernetes.io/projected/8c99d116-9378-471e-a119-1a468f151a77-kube-api-access-p4php\") pod \"dnsmasq-dns-cf9d54b65-rdpsv\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.111046 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-fernet-keys\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.111111 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-credential-keys\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.111203 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-config-data\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.111230 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-scripts\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " 
pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.111270 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr7r6\" (UniqueName: \"kubernetes.io/projected/9756fa47-5175-400c-88f1-1129d417c1f8-kube-api-access-qr7r6\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.111317 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-combined-ca-bundle\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.115218 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-scripts\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.115428 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-credential-keys\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.115493 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-config-data\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.115885 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-fernet-keys\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.117170 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-combined-ca-bundle\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.127190 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr7r6\" (UniqueName: \"kubernetes.io/projected/9756fa47-5175-400c-88f1-1129d417c1f8-kube-api-access-qr7r6\") pod \"keystone-bootstrap-txzc6\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.151405 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.209068 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.632053 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf9d54b65-rdpsv"] Oct 06 15:07:18 crc kubenswrapper[4757]: I1006 15:07:18.719143 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-txzc6"] Oct 06 15:07:19 crc kubenswrapper[4757]: I1006 15:07:19.604193 4757 generic.go:334] "Generic (PLEG): container finished" podID="8c99d116-9378-471e-a119-1a468f151a77" containerID="41a0f93b8e39ea17f7827f0f1cac766f35defa977ed4b48d1cf41ddf11d0b91d" exitCode=0 Oct 06 15:07:19 crc kubenswrapper[4757]: I1006 15:07:19.604250 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" event={"ID":"8c99d116-9378-471e-a119-1a468f151a77","Type":"ContainerDied","Data":"41a0f93b8e39ea17f7827f0f1cac766f35defa977ed4b48d1cf41ddf11d0b91d"} Oct 06 15:07:19 crc kubenswrapper[4757]: I1006 15:07:19.605827 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" event={"ID":"8c99d116-9378-471e-a119-1a468f151a77","Type":"ContainerStarted","Data":"86f73fb3b4fbeb456a6f34a939b54c0501ffc33bfe42af4f235f22e9050462a3"} Oct 06 15:07:19 crc kubenswrapper[4757]: I1006 15:07:19.608386 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-txzc6" event={"ID":"9756fa47-5175-400c-88f1-1129d417c1f8","Type":"ContainerStarted","Data":"7c9c8e4f6885c56ff2d317a82c7d5d2207ad6215bcc0660abf6517bdf99124d2"} Oct 06 15:07:19 crc kubenswrapper[4757]: I1006 15:07:19.608428 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-txzc6" event={"ID":"9756fa47-5175-400c-88f1-1129d417c1f8","Type":"ContainerStarted","Data":"e56fd22b7ebe47161ba0eeba5f921aed0a2d6685928f3ba67fa818ace5d91dc4"} Oct 06 15:07:20 crc kubenswrapper[4757]: I1006 15:07:20.619410 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" event={"ID":"8c99d116-9378-471e-a119-1a468f151a77","Type":"ContainerStarted","Data":"f147cf0e982910f783624667339a228fb81806a50f34a3b26070524947194816"} Oct 06 15:07:20 crc kubenswrapper[4757]: I1006 15:07:20.653822 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" podStartSLOduration=3.653797053 podStartE2EDuration="3.653797053s" podCreationTimestamp="2025-10-06 15:07:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:07:20.647793979 +0000 UTC m=+5329.145112516" watchObservedRunningTime="2025-10-06 15:07:20.653797053 +0000 UTC m=+5329.151115590" Oct 06 15:07:20 crc kubenswrapper[4757]: I1006 15:07:20.655723 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-txzc6" podStartSLOduration=3.6557152349999997 podStartE2EDuration="3.655715235s" podCreationTimestamp="2025-10-06 15:07:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:07:19.654397628 +0000 UTC m=+5328.151716165" watchObservedRunningTime="2025-10-06 15:07:20.655715235 +0000 UTC m=+5329.153033772" Oct 06 15:07:21 crc kubenswrapper[4757]: I1006 15:07:21.626667 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:22 crc 
kubenswrapper[4757]: I1006 15:07:22.640047 4757 generic.go:334] "Generic (PLEG): container finished" podID="9756fa47-5175-400c-88f1-1129d417c1f8" containerID="7c9c8e4f6885c56ff2d317a82c7d5d2207ad6215bcc0660abf6517bdf99124d2" exitCode=0 Oct 06 15:07:22 crc kubenswrapper[4757]: I1006 15:07:22.640141 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-txzc6" event={"ID":"9756fa47-5175-400c-88f1-1129d417c1f8","Type":"ContainerDied","Data":"7c9c8e4f6885c56ff2d317a82c7d5d2207ad6215bcc0660abf6517bdf99124d2"} Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.012442 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.125969 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-scripts\") pod \"9756fa47-5175-400c-88f1-1129d417c1f8\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.126240 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-fernet-keys\") pod \"9756fa47-5175-400c-88f1-1129d417c1f8\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.126319 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-credential-keys\") pod \"9756fa47-5175-400c-88f1-1129d417c1f8\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.126391 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-config-data\") pod \"9756fa47-5175-400c-88f1-1129d417c1f8\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.126431 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qr7r6\" (UniqueName: \"kubernetes.io/projected/9756fa47-5175-400c-88f1-1129d417c1f8-kube-api-access-qr7r6\") pod \"9756fa47-5175-400c-88f1-1129d417c1f8\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.126469 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-combined-ca-bundle\") pod \"9756fa47-5175-400c-88f1-1129d417c1f8\" (UID: \"9756fa47-5175-400c-88f1-1129d417c1f8\") " Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.131446 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9756fa47-5175-400c-88f1-1129d417c1f8" (UID: "9756fa47-5175-400c-88f1-1129d417c1f8"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.131891 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9756fa47-5175-400c-88f1-1129d417c1f8-kube-api-access-qr7r6" (OuterVolumeSpecName: "kube-api-access-qr7r6") pod "9756fa47-5175-400c-88f1-1129d417c1f8" (UID: "9756fa47-5175-400c-88f1-1129d417c1f8"). InnerVolumeSpecName "kube-api-access-qr7r6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.131953 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9756fa47-5175-400c-88f1-1129d417c1f8" (UID: "9756fa47-5175-400c-88f1-1129d417c1f8"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.133395 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-scripts" (OuterVolumeSpecName: "scripts") pod "9756fa47-5175-400c-88f1-1129d417c1f8" (UID: "9756fa47-5175-400c-88f1-1129d417c1f8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.152359 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9756fa47-5175-400c-88f1-1129d417c1f8" (UID: "9756fa47-5175-400c-88f1-1129d417c1f8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.159881 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-config-data" (OuterVolumeSpecName: "config-data") pod "9756fa47-5175-400c-88f1-1129d417c1f8" (UID: "9756fa47-5175-400c-88f1-1129d417c1f8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.228950 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.229208 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.229329 4757 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.229420 4757 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.229501 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9756fa47-5175-400c-88f1-1129d417c1f8-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.229582 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qr7r6\" (UniqueName: \"kubernetes.io/projected/9756fa47-5175-400c-88f1-1129d417c1f8-kube-api-access-qr7r6\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.659608 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-txzc6" event={"ID":"9756fa47-5175-400c-88f1-1129d417c1f8","Type":"ContainerDied","Data":"e56fd22b7ebe47161ba0eeba5f921aed0a2d6685928f3ba67fa818ace5d91dc4"} Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.659933 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e56fd22b7ebe47161ba0eeba5f921aed0a2d6685928f3ba67fa818ace5d91dc4" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.659998 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-txzc6" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.762196 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-txzc6"] Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.774490 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-txzc6"] Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.848008 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7pmh5"] Oct 06 15:07:24 crc kubenswrapper[4757]: E1006 15:07:24.848465 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9756fa47-5175-400c-88f1-1129d417c1f8" containerName="keystone-bootstrap" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.848483 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="9756fa47-5175-400c-88f1-1129d417c1f8" containerName="keystone-bootstrap" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.848715 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="9756fa47-5175-400c-88f1-1129d417c1f8" containerName="keystone-bootstrap" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.849432 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.851799 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.853052 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8hbt2" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.853139 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.853624 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.867058 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7pmh5"] Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.941838 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-scripts\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.941880 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-config-data\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.941950 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbhhm\" (UniqueName: \"kubernetes.io/projected/7155f33d-aad8-4689-990c-799c2c1ab159-kube-api-access-bbhhm\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.941970 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-credential-keys\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.942156 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-fernet-keys\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:24 crc kubenswrapper[4757]: I1006 15:07:24.942312 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-combined-ca-bundle\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.044363 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbhhm\" (UniqueName: \"kubernetes.io/projected/7155f33d-aad8-4689-990c-799c2c1ab159-kube-api-access-bbhhm\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.044424 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-credential-keys\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.044501 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-fernet-keys\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.044564 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-combined-ca-bundle\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.044607 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-scripts\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.044626 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-config-data\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.050232 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-credential-keys\") pod \"keystone-bootstrap-7pmh5\" (UID: 
\"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.050260 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-config-data\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.051179 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-fernet-keys\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.057463 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-scripts\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.064034 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-combined-ca-bundle\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.067273 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbhhm\" (UniqueName: \"kubernetes.io/projected/7155f33d-aad8-4689-990c-799c2c1ab159-kube-api-access-bbhhm\") pod \"keystone-bootstrap-7pmh5\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.173325 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.628411 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7pmh5"] Oct 06 15:07:25 crc kubenswrapper[4757]: W1006 15:07:25.637357 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7155f33d_aad8_4689_990c_799c2c1ab159.slice/crio-48f2837310bb71c11b623e066df779fe0cbe3d304a5b614559e77e7072cc7e42 WatchSource:0}: Error finding container 48f2837310bb71c11b623e066df779fe0cbe3d304a5b614559e77e7072cc7e42: Status 404 returned error can't find the container with id 48f2837310bb71c11b623e066df779fe0cbe3d304a5b614559e77e7072cc7e42 Oct 06 15:07:25 crc kubenswrapper[4757]: I1006 15:07:25.680465 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7pmh5" event={"ID":"7155f33d-aad8-4689-990c-799c2c1ab159","Type":"ContainerStarted","Data":"48f2837310bb71c11b623e066df779fe0cbe3d304a5b614559e77e7072cc7e42"} Oct 06 15:07:26 crc kubenswrapper[4757]: I1006 15:07:26.207056 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9756fa47-5175-400c-88f1-1129d417c1f8" path="/var/lib/kubelet/pods/9756fa47-5175-400c-88f1-1129d417c1f8/volumes" Oct 06 15:07:26 crc kubenswrapper[4757]: I1006 15:07:26.690277 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7pmh5" event={"ID":"7155f33d-aad8-4689-990c-799c2c1ab159","Type":"ContainerStarted","Data":"843427f6731161d2a8add1b96ddc64a45e59c1e77d894b3250f6609671a89283"} Oct 06 15:07:26 crc kubenswrapper[4757]: I1006 15:07:26.728552 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7pmh5" podStartSLOduration=2.728516899 podStartE2EDuration="2.728516899s" podCreationTimestamp="2025-10-06 15:07:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:07:26.717995459 +0000 UTC m=+5335.215314016" watchObservedRunningTime="2025-10-06 15:07:26.728516899 +0000 UTC m=+5335.225835466" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.153435 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.236906 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79d994df79-65xps"] Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.237205 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79d994df79-65xps" podUID="1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" containerName="dnsmasq-dns" containerID="cri-o://d2d295724c333bdd2fc3e8a7f7b3afafb384048883cb1085c44fa09082c0c7cc" gracePeriod=10 Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.707884 4757 generic.go:334] "Generic (PLEG): container finished" podID="7155f33d-aad8-4689-990c-799c2c1ab159" containerID="843427f6731161d2a8add1b96ddc64a45e59c1e77d894b3250f6609671a89283" exitCode=0 Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.708004 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7pmh5" event={"ID":"7155f33d-aad8-4689-990c-799c2c1ab159","Type":"ContainerDied","Data":"843427f6731161d2a8add1b96ddc64a45e59c1e77d894b3250f6609671a89283"} Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.712815 4757 
generic.go:334] "Generic (PLEG): container finished" podID="1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" containerID="d2d295724c333bdd2fc3e8a7f7b3afafb384048883cb1085c44fa09082c0c7cc" exitCode=0 Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.712898 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79d994df79-65xps" event={"ID":"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a","Type":"ContainerDied","Data":"d2d295724c333bdd2fc3e8a7f7b3afafb384048883cb1085c44fa09082c0c7cc"} Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.712941 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79d994df79-65xps" event={"ID":"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a","Type":"ContainerDied","Data":"5645da8190e1d569cbf9429c4320db3db2b3d5799370fe08b22455028331f40f"} Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.712956 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5645da8190e1d569cbf9429c4320db3db2b3d5799370fe08b22455028331f40f" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.733046 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79d994df79-65xps" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.814561 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-sb\") pod \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.814625 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-nb\") pod \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.814713 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-config\") pod \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.814755 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-dns-svc\") pod \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.814851 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnbgl\" (UniqueName: \"kubernetes.io/projected/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-kube-api-access-hnbgl\") pod \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\" (UID: \"1e6b6aab-a6bc-4d94-8b12-9d306ca4260a\") " Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.820033 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-kube-api-access-hnbgl" (OuterVolumeSpecName: "kube-api-access-hnbgl") pod "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" (UID: "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a"). InnerVolumeSpecName "kube-api-access-hnbgl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.852803 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" (UID: "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.859288 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" (UID: "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.859611 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-config" (OuterVolumeSpecName: "config") pod "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" (UID: "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.866522 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" (UID: "1e6b6aab-a6bc-4d94-8b12-9d306ca4260a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.917178 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-config\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.917212 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.917224 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnbgl\" (UniqueName: \"kubernetes.io/projected/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-kube-api-access-hnbgl\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.917237 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:28 crc kubenswrapper[4757]: I1006 15:07:28.917248 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:29 crc kubenswrapper[4757]: I1006 15:07:29.724966 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79d994df79-65xps" Oct 06 15:07:29 crc kubenswrapper[4757]: I1006 15:07:29.783760 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79d994df79-65xps"] Oct 06 15:07:29 crc kubenswrapper[4757]: I1006 15:07:29.791675 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79d994df79-65xps"] Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.032136 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.136350 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-combined-ca-bundle\") pod \"7155f33d-aad8-4689-990c-799c2c1ab159\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.136429 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbhhm\" (UniqueName: \"kubernetes.io/projected/7155f33d-aad8-4689-990c-799c2c1ab159-kube-api-access-bbhhm\") pod \"7155f33d-aad8-4689-990c-799c2c1ab159\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.136483 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-fernet-keys\") pod \"7155f33d-aad8-4689-990c-799c2c1ab159\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.136542 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-config-data\") pod \"7155f33d-aad8-4689-990c-799c2c1ab159\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.136581 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-scripts\") pod \"7155f33d-aad8-4689-990c-799c2c1ab159\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.136597 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-credential-keys\") pod \"7155f33d-aad8-4689-990c-799c2c1ab159\" (UID: \"7155f33d-aad8-4689-990c-799c2c1ab159\") " Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.140862 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7155f33d-aad8-4689-990c-799c2c1ab159" (UID: "7155f33d-aad8-4689-990c-799c2c1ab159"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.142290 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-scripts" (OuterVolumeSpecName: "scripts") pod "7155f33d-aad8-4689-990c-799c2c1ab159" (UID: "7155f33d-aad8-4689-990c-799c2c1ab159"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.143150 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7155f33d-aad8-4689-990c-799c2c1ab159-kube-api-access-bbhhm" (OuterVolumeSpecName: "kube-api-access-bbhhm") pod "7155f33d-aad8-4689-990c-799c2c1ab159" (UID: "7155f33d-aad8-4689-990c-799c2c1ab159"). InnerVolumeSpecName "kube-api-access-bbhhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.146974 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7155f33d-aad8-4689-990c-799c2c1ab159" (UID: "7155f33d-aad8-4689-990c-799c2c1ab159"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.159794 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-config-data" (OuterVolumeSpecName: "config-data") pod "7155f33d-aad8-4689-990c-799c2c1ab159" (UID: "7155f33d-aad8-4689-990c-799c2c1ab159"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.169185 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7155f33d-aad8-4689-990c-799c2c1ab159" (UID: "7155f33d-aad8-4689-990c-799c2c1ab159"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.193841 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" path="/var/lib/kubelet/pods/1e6b6aab-a6bc-4d94-8b12-9d306ca4260a/volumes" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.239322 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbhhm\" (UniqueName: \"kubernetes.io/projected/7155f33d-aad8-4689-990c-799c2c1ab159-kube-api-access-bbhhm\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.239351 4757 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.239362 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.239371 4757 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-scripts\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.239380 4757 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.239388 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7155f33d-aad8-4689-990c-799c2c1ab159-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.735702 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7pmh5" event={"ID":"7155f33d-aad8-4689-990c-799c2c1ab159","Type":"ContainerDied","Data":"48f2837310bb71c11b623e066df779fe0cbe3d304a5b614559e77e7072cc7e42"} Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.737156 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48f2837310bb71c11b623e066df779fe0cbe3d304a5b614559e77e7072cc7e42" Oct 06 15:07:30 crc kubenswrapper[4757]: I1006 15:07:30.735772 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7pmh5" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.160016 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5c5bc6dbbd-g2zwx"] Oct 06 15:07:31 crc kubenswrapper[4757]: E1006 15:07:31.160529 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" containerName="dnsmasq-dns" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.160560 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" containerName="dnsmasq-dns" Oct 06 15:07:31 crc kubenswrapper[4757]: E1006 15:07:31.160582 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7155f33d-aad8-4689-990c-799c2c1ab159" containerName="keystone-bootstrap" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.160594 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7155f33d-aad8-4689-990c-799c2c1ab159" containerName="keystone-bootstrap" Oct 06 15:07:31 crc kubenswrapper[4757]: E1006 15:07:31.160627 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" containerName="init" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.160638 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" containerName="init" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.161368 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e6b6aab-a6bc-4d94-8b12-9d306ca4260a" containerName="dnsmasq-dns" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.161402 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7155f33d-aad8-4689-990c-799c2c1ab159" containerName="keystone-bootstrap" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.162220 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.169470 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.171779 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.171981 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8hbt2" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.172202 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.172333 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.172929 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.185128 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5c5bc6dbbd-g2zwx"] Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.261220 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-public-tls-certs\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.262379 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-credential-keys\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.262535 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfx8j\" (UniqueName: \"kubernetes.io/projected/15855896-fa59-487b-87b6-37fdfa827188-kube-api-access-kfx8j\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.262583 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-internal-tls-certs\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.262737 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-combined-ca-bundle\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.262805 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-scripts\") pod \"keystone-5c5bc6dbbd-g2zwx\" 
(UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.262917 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-fernet-keys\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.263139 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-config-data\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.364700 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-public-tls-certs\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.364817 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-credential-keys\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.364854 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfx8j\" (UniqueName: \"kubernetes.io/projected/15855896-fa59-487b-87b6-37fdfa827188-kube-api-access-kfx8j\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.364879 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-internal-tls-certs\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.364915 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-combined-ca-bundle\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.364942 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-scripts\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.364983 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-fernet-keys\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " 
pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.365033 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-config-data\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.368840 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-public-tls-certs\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.368977 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-fernet-keys\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.369696 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-config-data\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.369803 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-scripts\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.369979 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-internal-tls-certs\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.376526 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-credential-keys\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.387837 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15855896-fa59-487b-87b6-37fdfa827188-combined-ca-bundle\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.392126 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfx8j\" (UniqueName: \"kubernetes.io/projected/15855896-fa59-487b-87b6-37fdfa827188-kube-api-access-kfx8j\") pod \"keystone-5c5bc6dbbd-g2zwx\" (UID: \"15855896-fa59-487b-87b6-37fdfa827188\") " pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.506497 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:07:31 crc kubenswrapper[4757]: I1006 15:07:31.982533 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5c5bc6dbbd-g2zwx"] Oct 06 15:07:32 crc kubenswrapper[4757]: I1006 15:07:32.755699 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5c5bc6dbbd-g2zwx" event={"ID":"15855896-fa59-487b-87b6-37fdfa827188","Type":"ContainerStarted","Data":"412f69d1e777b60313397ab51602e61aadbd11b7b2418c00f2f7a739413a07ac"} Oct 06 15:07:32 crc kubenswrapper[4757]: I1006 15:07:32.756117 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5c5bc6dbbd-g2zwx" event={"ID":"15855896-fa59-487b-87b6-37fdfa827188","Type":"ContainerStarted","Data":"04abdb4faf5d04fa120e7149c927b52a26b08542cf7bb13b411f7110b2aac4f7"} Oct 06 15:07:32 crc kubenswrapper[4757]: I1006 15:07:32.756154 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:08:02 crc kubenswrapper[4757]: I1006 15:08:02.998175 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5c5bc6dbbd-g2zwx" Oct 06 15:08:03 crc kubenswrapper[4757]: I1006 15:08:03.025257 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5c5bc6dbbd-g2zwx" podStartSLOduration=32.025233976 podStartE2EDuration="32.025233976s" podCreationTimestamp="2025-10-06 15:07:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:07:32.784996157 +0000 UTC m=+5341.282314734" watchObservedRunningTime="2025-10-06 15:08:03.025233976 +0000 UTC m=+5371.522552513" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.317034 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.319429 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.324861 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.324921 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-p5kqk" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.324869 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.337355 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.403670 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-openstack-config\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.403787 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-openstack-config-secret\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.403819 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn4lb\" (UniqueName: \"kubernetes.io/projected/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-kube-api-access-jn4lb\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.403938 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.505235 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.505300 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-openstack-config\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.505355 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn4lb\" (UniqueName: \"kubernetes.io/projected/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-kube-api-access-jn4lb\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.505372 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-openstack-config-secret\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.507543 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-openstack-config\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.510919 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.519975 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-openstack-config-secret\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.521544 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn4lb\" (UniqueName: \"kubernetes.io/projected/8ba32982-b0f0-4865-9bd6-b249bbbfafd2-kube-api-access-jn4lb\") pod \"openstackclient\" (UID: \"8ba32982-b0f0-4865-9bd6-b249bbbfafd2\") " pod="openstack/openstackclient" Oct 06 15:08:07 crc kubenswrapper[4757]: I1006 15:08:07.645188 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 06 15:08:08 crc kubenswrapper[4757]: I1006 15:08:08.081840 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 06 15:08:09 crc kubenswrapper[4757]: I1006 15:08:09.095591 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"8ba32982-b0f0-4865-9bd6-b249bbbfafd2","Type":"ContainerStarted","Data":"ff09f7649cc77118459e94e4f60eddd64b418fc0fdb2205e45e9b60e4ae8caa7"} Oct 06 15:08:09 crc kubenswrapper[4757]: I1006 15:08:09.095963 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"8ba32982-b0f0-4865-9bd6-b249bbbfafd2","Type":"ContainerStarted","Data":"e2e1f6cf16a37781339153e1f09fdb92bfc4ec7dfe24eb8a434894cc3ff39f26"} Oct 06 15:08:09 crc kubenswrapper[4757]: I1006 15:08:09.123831 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.123766893 podStartE2EDuration="2.123766893s" podCreationTimestamp="2025-10-06 15:08:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:08:09.11841659 +0000 UTC m=+5377.615735147" watchObservedRunningTime="2025-10-06 15:08:09.123766893 +0000 UTC m=+5377.621085460" Oct 06 15:08:37 crc kubenswrapper[4757]: I1006 15:08:37.731547 4757 scope.go:117] "RemoveContainer" containerID="a5ff5f6d3972e9dee3bef20d2bb5b1ba46435b649139b30fe2f13f9799da229b" Oct 06 15:08:37 crc kubenswrapper[4757]: I1006 15:08:37.764967 4757 scope.go:117] "RemoveContainer" containerID="afed9789a9fa5bc388ef68470a9f84147b47e6c3cd95975e9e9cdf606e6fbb7f" Oct 06 15:08:37 crc kubenswrapper[4757]: I1006 15:08:37.821519 4757 scope.go:117] "RemoveContainer" containerID="8535c0c0769771e13a076bdf53d5cebf644897ca0c68b80d13fddfaaadba5f59" Oct 06 15:08:37 crc kubenswrapper[4757]: I1006 15:08:37.849491 4757 scope.go:117] "RemoveContainer" containerID="e03af2951b5d3267e8919e1b19c3f6655b6e0e4a06a8d2747494291ddd6dae20" Oct 06 15:08:37 crc kubenswrapper[4757]: I1006 15:08:37.886208 4757 scope.go:117] "RemoveContainer" containerID="15397b94b7d4fe5cef23a6f0aea4c769777e5955f7293e4fb752f3df064dc80b" Oct 06 15:09:04 crc kubenswrapper[4757]: I1006 15:09:04.361198 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 15:09:04 crc kubenswrapper[4757]: I1006 15:09:04.361755 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.550888 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sqmxj"] Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.553647 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.573791 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sqmxj"] Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.611728 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-utilities\") pod \"certified-operators-sqmxj\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.611779 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-catalog-content\") pod \"certified-operators-sqmxj\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.611924 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tchz\" (UniqueName: \"kubernetes.io/projected/ce729b51-d233-4d10-9639-e6fbdd768c26-kube-api-access-8tchz\") pod \"certified-operators-sqmxj\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.713968 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tchz\" (UniqueName: \"kubernetes.io/projected/ce729b51-d233-4d10-9639-e6fbdd768c26-kube-api-access-8tchz\") pod \"certified-operators-sqmxj\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.714211 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-utilities\") pod \"certified-operators-sqmxj\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.714274 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-catalog-content\") pod \"certified-operators-sqmxj\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.714801 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-utilities\") pod \"certified-operators-sqmxj\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.715232 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-catalog-content\") pod \"certified-operators-sqmxj\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.744765 4757 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8tchz\" (UniqueName: \"kubernetes.io/projected/ce729b51-d233-4d10-9639-e6fbdd768c26-kube-api-access-8tchz\") pod \"certified-operators-sqmxj\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:26 crc kubenswrapper[4757]: I1006 15:09:26.890117 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:27 crc kubenswrapper[4757]: I1006 15:09:27.368143 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sqmxj"] Oct 06 15:09:27 crc kubenswrapper[4757]: I1006 15:09:27.882795 4757 generic.go:334] "Generic (PLEG): container finished" podID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerID="933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd" exitCode=0 Oct 06 15:09:27 crc kubenswrapper[4757]: I1006 15:09:27.882869 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqmxj" event={"ID":"ce729b51-d233-4d10-9639-e6fbdd768c26","Type":"ContainerDied","Data":"933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd"} Oct 06 15:09:27 crc kubenswrapper[4757]: I1006 15:09:27.882912 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqmxj" event={"ID":"ce729b51-d233-4d10-9639-e6fbdd768c26","Type":"ContainerStarted","Data":"c3f9a540afb1a7b4374e05e851f68a16ba6a4076b69404dfcbe8795119f1dbdc"} Oct 06 15:09:27 crc kubenswrapper[4757]: I1006 15:09:27.887240 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 15:09:28 crc kubenswrapper[4757]: I1006 15:09:28.915566 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqmxj" event={"ID":"ce729b51-d233-4d10-9639-e6fbdd768c26","Type":"ContainerStarted","Data":"dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f"} Oct 06 15:09:29 crc kubenswrapper[4757]: I1006 15:09:29.927355 4757 generic.go:334] "Generic (PLEG): container finished" podID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerID="dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f" exitCode=0 Oct 06 15:09:29 crc kubenswrapper[4757]: I1006 15:09:29.927476 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqmxj" event={"ID":"ce729b51-d233-4d10-9639-e6fbdd768c26","Type":"ContainerDied","Data":"dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f"} Oct 06 15:09:30 crc kubenswrapper[4757]: I1006 15:09:30.946700 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqmxj" event={"ID":"ce729b51-d233-4d10-9639-e6fbdd768c26","Type":"ContainerStarted","Data":"957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0"} Oct 06 15:09:30 crc kubenswrapper[4757]: I1006 15:09:30.974864 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sqmxj" podStartSLOduration=2.48204539 podStartE2EDuration="4.974839581s" podCreationTimestamp="2025-10-06 15:09:26 +0000 UTC" firstStartedPulling="2025-10-06 15:09:27.885855967 +0000 UTC m=+5456.383174504" lastFinishedPulling="2025-10-06 15:09:30.378650158 +0000 UTC m=+5458.875968695" observedRunningTime="2025-10-06 15:09:30.967549475 +0000 UTC m=+5459.464868042" watchObservedRunningTime="2025-10-06 
15:09:30.974839581 +0000 UTC m=+5459.472158118" Oct 06 15:09:34 crc kubenswrapper[4757]: I1006 15:09:34.361314 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 15:09:34 crc kubenswrapper[4757]: I1006 15:09:34.361634 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 15:09:36 crc kubenswrapper[4757]: I1006 15:09:36.890727 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:36 crc kubenswrapper[4757]: I1006 15:09:36.891106 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:36 crc kubenswrapper[4757]: I1006 15:09:36.944395 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:37 crc kubenswrapper[4757]: I1006 15:09:37.049484 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:37 crc kubenswrapper[4757]: I1006 15:09:37.190260 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sqmxj"] Oct 06 15:09:37 crc kubenswrapper[4757]: I1006 15:09:37.986991 4757 scope.go:117] "RemoveContainer" containerID="37654d3d038afc23b92fe3a958f582e331756a1f5668ac5128cb8df7a7999dfc" Oct 06 15:09:38 crc kubenswrapper[4757]: I1006 15:09:38.031288 4757 scope.go:117] "RemoveContainer" containerID="7dda30fd00e79ca296467f36a6434b1326426000fd6045216956436574dae36c" Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.030538 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sqmxj" podUID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerName="registry-server" containerID="cri-o://957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0" gracePeriod=2 Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.492214 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.571841 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-catalog-content\") pod \"ce729b51-d233-4d10-9639-e6fbdd768c26\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.571915 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-utilities\") pod \"ce729b51-d233-4d10-9639-e6fbdd768c26\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.571966 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tchz\" (UniqueName: \"kubernetes.io/projected/ce729b51-d233-4d10-9639-e6fbdd768c26-kube-api-access-8tchz\") pod \"ce729b51-d233-4d10-9639-e6fbdd768c26\" (UID: \"ce729b51-d233-4d10-9639-e6fbdd768c26\") " Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.573844 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-utilities" (OuterVolumeSpecName: "utilities") pod "ce729b51-d233-4d10-9639-e6fbdd768c26" (UID: "ce729b51-d233-4d10-9639-e6fbdd768c26"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.582492 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce729b51-d233-4d10-9639-e6fbdd768c26-kube-api-access-8tchz" (OuterVolumeSpecName: "kube-api-access-8tchz") pod "ce729b51-d233-4d10-9639-e6fbdd768c26" (UID: "ce729b51-d233-4d10-9639-e6fbdd768c26"). InnerVolumeSpecName "kube-api-access-8tchz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.673801 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.674182 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tchz\" (UniqueName: \"kubernetes.io/projected/ce729b51-d233-4d10-9639-e6fbdd768c26-kube-api-access-8tchz\") on node \"crc\" DevicePath \"\"" Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.788188 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce729b51-d233-4d10-9639-e6fbdd768c26" (UID: "ce729b51-d233-4d10-9639-e6fbdd768c26"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:09:39 crc kubenswrapper[4757]: I1006 15:09:39.877441 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce729b51-d233-4d10-9639-e6fbdd768c26-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.046166 4757 generic.go:334] "Generic (PLEG): container finished" podID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerID="957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0" exitCode=0 Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.046236 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqmxj" event={"ID":"ce729b51-d233-4d10-9639-e6fbdd768c26","Type":"ContainerDied","Data":"957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0"} Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.046252 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sqmxj" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.046287 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sqmxj" event={"ID":"ce729b51-d233-4d10-9639-e6fbdd768c26","Type":"ContainerDied","Data":"c3f9a540afb1a7b4374e05e851f68a16ba6a4076b69404dfcbe8795119f1dbdc"} Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.046317 4757 scope.go:117] "RemoveContainer" containerID="957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.077676 4757 scope.go:117] "RemoveContainer" containerID="dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.091135 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sqmxj"] Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.099858 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sqmxj"] Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.117658 4757 scope.go:117] "RemoveContainer" containerID="933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.176329 4757 scope.go:117] "RemoveContainer" containerID="957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0" Oct 06 15:09:40 crc kubenswrapper[4757]: E1006 15:09:40.176760 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0\": container with ID starting with 957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0 not found: ID does not exist" containerID="957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.176798 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0"} err="failed to get container status \"957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0\": rpc error: code = NotFound desc = could not find container \"957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0\": container with ID starting with 957508ac896a501311144a44f59d8412bb64bb437f6ca8b458ddd7b224693ab0 not found: ID does not exist" Oct 06 
15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.176825 4757 scope.go:117] "RemoveContainer" containerID="dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f" Oct 06 15:09:40 crc kubenswrapper[4757]: E1006 15:09:40.180444 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f\": container with ID starting with dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f not found: ID does not exist" containerID="dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.180536 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f"} err="failed to get container status \"dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f\": rpc error: code = NotFound desc = could not find container \"dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f\": container with ID starting with dc3acb518d95f5921b918f29c245f7fd72733697ec5c6a77cf0e23c64a18025f not found: ID does not exist" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.180601 4757 scope.go:117] "RemoveContainer" containerID="933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd" Oct 06 15:09:40 crc kubenswrapper[4757]: E1006 15:09:40.182041 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd\": container with ID starting with 933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd not found: ID does not exist" containerID="933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.182073 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd"} err="failed to get container status \"933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd\": rpc error: code = NotFound desc = could not find container \"933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd\": container with ID starting with 933de472b6b34934d993387f06d1ffd935daba0fdd6942499051fecc0e7859bd not found: ID does not exist" Oct 06 15:09:40 crc kubenswrapper[4757]: I1006 15:09:40.195677 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce729b51-d233-4d10-9639-e6fbdd768c26" path="/var/lib/kubelet/pods/ce729b51-d233-4d10-9639-e6fbdd768c26/volumes" Oct 06 15:09:40 crc kubenswrapper[4757]: E1006 15:09:40.289539 4757 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce729b51_d233_4d10_9639_e6fbdd768c26.slice/crio-c3f9a540afb1a7b4374e05e851f68a16ba6a4076b69404dfcbe8795119f1dbdc\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce729b51_d233_4d10_9639_e6fbdd768c26.slice\": RecentStats: unable to find data in memory cache]" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.170393 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-xc9j5"] Oct 06 15:09:41 crc kubenswrapper[4757]: E1006 15:09:41.170710 4757 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerName="extract-utilities" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.170724 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerName="extract-utilities" Oct 06 15:09:41 crc kubenswrapper[4757]: E1006 15:09:41.170741 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerName="extract-content" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.170747 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerName="extract-content" Oct 06 15:09:41 crc kubenswrapper[4757]: E1006 15:09:41.170766 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerName="registry-server" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.170773 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerName="registry-server" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.170933 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce729b51-d233-4d10-9639-e6fbdd768c26" containerName="registry-server" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.171472 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xc9j5" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.183267 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-xc9j5"] Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.301457 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lmgg\" (UniqueName: \"kubernetes.io/projected/8aceb73c-695c-45ef-a519-3648dc1defda-kube-api-access-2lmgg\") pod \"barbican-db-create-xc9j5\" (UID: \"8aceb73c-695c-45ef-a519-3648dc1defda\") " pod="openstack/barbican-db-create-xc9j5" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.403422 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lmgg\" (UniqueName: \"kubernetes.io/projected/8aceb73c-695c-45ef-a519-3648dc1defda-kube-api-access-2lmgg\") pod \"barbican-db-create-xc9j5\" (UID: \"8aceb73c-695c-45ef-a519-3648dc1defda\") " pod="openstack/barbican-db-create-xc9j5" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.424635 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lmgg\" (UniqueName: \"kubernetes.io/projected/8aceb73c-695c-45ef-a519-3648dc1defda-kube-api-access-2lmgg\") pod \"barbican-db-create-xc9j5\" (UID: \"8aceb73c-695c-45ef-a519-3648dc1defda\") " pod="openstack/barbican-db-create-xc9j5" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.490089 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-xc9j5" Oct 06 15:09:41 crc kubenswrapper[4757]: I1006 15:09:41.955969 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-xc9j5"] Oct 06 15:09:42 crc kubenswrapper[4757]: I1006 15:09:42.063167 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xc9j5" event={"ID":"8aceb73c-695c-45ef-a519-3648dc1defda","Type":"ContainerStarted","Data":"3d8696808ca32d2ff3c26469df31ae7c328ce72a2b8bfa8f10b675f32044c993"} Oct 06 15:09:43 crc kubenswrapper[4757]: I1006 15:09:43.078356 4757 generic.go:334] "Generic (PLEG): container finished" podID="8aceb73c-695c-45ef-a519-3648dc1defda" containerID="d5b021e6d04126c00c4162d73e7573d2199f9679817b63f48013fb7181f5e4ba" exitCode=0 Oct 06 15:09:43 crc kubenswrapper[4757]: I1006 15:09:43.078452 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xc9j5" event={"ID":"8aceb73c-695c-45ef-a519-3648dc1defda","Type":"ContainerDied","Data":"d5b021e6d04126c00c4162d73e7573d2199f9679817b63f48013fb7181f5e4ba"} Oct 06 15:09:44 crc kubenswrapper[4757]: I1006 15:09:44.537298 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xc9j5" Oct 06 15:09:44 crc kubenswrapper[4757]: I1006 15:09:44.676280 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lmgg\" (UniqueName: \"kubernetes.io/projected/8aceb73c-695c-45ef-a519-3648dc1defda-kube-api-access-2lmgg\") pod \"8aceb73c-695c-45ef-a519-3648dc1defda\" (UID: \"8aceb73c-695c-45ef-a519-3648dc1defda\") " Oct 06 15:09:44 crc kubenswrapper[4757]: I1006 15:09:44.681751 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aceb73c-695c-45ef-a519-3648dc1defda-kube-api-access-2lmgg" (OuterVolumeSpecName: "kube-api-access-2lmgg") pod "8aceb73c-695c-45ef-a519-3648dc1defda" (UID: "8aceb73c-695c-45ef-a519-3648dc1defda"). InnerVolumeSpecName "kube-api-access-2lmgg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:09:44 crc kubenswrapper[4757]: I1006 15:09:44.779962 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lmgg\" (UniqueName: \"kubernetes.io/projected/8aceb73c-695c-45ef-a519-3648dc1defda-kube-api-access-2lmgg\") on node \"crc\" DevicePath \"\"" Oct 06 15:09:45 crc kubenswrapper[4757]: I1006 15:09:45.095691 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xc9j5" event={"ID":"8aceb73c-695c-45ef-a519-3648dc1defda","Type":"ContainerDied","Data":"3d8696808ca32d2ff3c26469df31ae7c328ce72a2b8bfa8f10b675f32044c993"} Oct 06 15:09:45 crc kubenswrapper[4757]: I1006 15:09:45.095725 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d8696808ca32d2ff3c26469df31ae7c328ce72a2b8bfa8f10b675f32044c993" Oct 06 15:09:45 crc kubenswrapper[4757]: I1006 15:09:45.095778 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-xc9j5" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.193298 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-5168-account-create-f2vck"] Oct 06 15:09:51 crc kubenswrapper[4757]: E1006 15:09:51.194386 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aceb73c-695c-45ef-a519-3648dc1defda" containerName="mariadb-database-create" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.194405 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aceb73c-695c-45ef-a519-3648dc1defda" containerName="mariadb-database-create" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.194634 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aceb73c-695c-45ef-a519-3648dc1defda" containerName="mariadb-database-create" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.195302 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5168-account-create-f2vck" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.198454 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.201174 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5168-account-create-f2vck"] Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.297395 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcdbh\" (UniqueName: \"kubernetes.io/projected/3eafe25b-cbf4-46ea-9b52-4b8244574443-kube-api-access-zcdbh\") pod \"barbican-5168-account-create-f2vck\" (UID: \"3eafe25b-cbf4-46ea-9b52-4b8244574443\") " pod="openstack/barbican-5168-account-create-f2vck" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.399276 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcdbh\" (UniqueName: \"kubernetes.io/projected/3eafe25b-cbf4-46ea-9b52-4b8244574443-kube-api-access-zcdbh\") pod \"barbican-5168-account-create-f2vck\" (UID: \"3eafe25b-cbf4-46ea-9b52-4b8244574443\") " pod="openstack/barbican-5168-account-create-f2vck" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.429957 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcdbh\" (UniqueName: \"kubernetes.io/projected/3eafe25b-cbf4-46ea-9b52-4b8244574443-kube-api-access-zcdbh\") pod \"barbican-5168-account-create-f2vck\" (UID: \"3eafe25b-cbf4-46ea-9b52-4b8244574443\") " pod="openstack/barbican-5168-account-create-f2vck" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.529249 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-5168-account-create-f2vck" Oct 06 15:09:51 crc kubenswrapper[4757]: I1006 15:09:51.835909 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-5168-account-create-f2vck"] Oct 06 15:09:51 crc kubenswrapper[4757]: W1006 15:09:51.841419 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3eafe25b_cbf4_46ea_9b52_4b8244574443.slice/crio-295544c8e788ea68e8f7d74c38b22c6a1fb2c837a4d15641bf27c62cdd7ad40c WatchSource:0}: Error finding container 295544c8e788ea68e8f7d74c38b22c6a1fb2c837a4d15641bf27c62cdd7ad40c: Status 404 returned error can't find the container with id 295544c8e788ea68e8f7d74c38b22c6a1fb2c837a4d15641bf27c62cdd7ad40c Oct 06 15:09:52 crc kubenswrapper[4757]: I1006 15:09:52.164404 4757 generic.go:334] "Generic (PLEG): container finished" podID="3eafe25b-cbf4-46ea-9b52-4b8244574443" containerID="49204f1162ba02c3146fb27401755b6f19b059976c5610b970449461bec2c4c9" exitCode=0 Oct 06 15:09:52 crc kubenswrapper[4757]: I1006 15:09:52.164455 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5168-account-create-f2vck" event={"ID":"3eafe25b-cbf4-46ea-9b52-4b8244574443","Type":"ContainerDied","Data":"49204f1162ba02c3146fb27401755b6f19b059976c5610b970449461bec2c4c9"} Oct 06 15:09:52 crc kubenswrapper[4757]: I1006 15:09:52.164704 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5168-account-create-f2vck" event={"ID":"3eafe25b-cbf4-46ea-9b52-4b8244574443","Type":"ContainerStarted","Data":"295544c8e788ea68e8f7d74c38b22c6a1fb2c837a4d15641bf27c62cdd7ad40c"} Oct 06 15:09:53 crc kubenswrapper[4757]: I1006 15:09:53.664194 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5168-account-create-f2vck" Oct 06 15:09:53 crc kubenswrapper[4757]: I1006 15:09:53.745643 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcdbh\" (UniqueName: \"kubernetes.io/projected/3eafe25b-cbf4-46ea-9b52-4b8244574443-kube-api-access-zcdbh\") pod \"3eafe25b-cbf4-46ea-9b52-4b8244574443\" (UID: \"3eafe25b-cbf4-46ea-9b52-4b8244574443\") " Oct 06 15:09:53 crc kubenswrapper[4757]: I1006 15:09:53.750996 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3eafe25b-cbf4-46ea-9b52-4b8244574443-kube-api-access-zcdbh" (OuterVolumeSpecName: "kube-api-access-zcdbh") pod "3eafe25b-cbf4-46ea-9b52-4b8244574443" (UID: "3eafe25b-cbf4-46ea-9b52-4b8244574443"). InnerVolumeSpecName "kube-api-access-zcdbh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:09:53 crc kubenswrapper[4757]: I1006 15:09:53.847146 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcdbh\" (UniqueName: \"kubernetes.io/projected/3eafe25b-cbf4-46ea-9b52-4b8244574443-kube-api-access-zcdbh\") on node \"crc\" DevicePath \"\"" Oct 06 15:09:54 crc kubenswrapper[4757]: I1006 15:09:54.203851 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-5168-account-create-f2vck" event={"ID":"3eafe25b-cbf4-46ea-9b52-4b8244574443","Type":"ContainerDied","Data":"295544c8e788ea68e8f7d74c38b22c6a1fb2c837a4d15641bf27c62cdd7ad40c"} Oct 06 15:09:54 crc kubenswrapper[4757]: I1006 15:09:54.203904 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="295544c8e788ea68e8f7d74c38b22c6a1fb2c837a4d15641bf27c62cdd7ad40c" Oct 06 15:09:54 crc kubenswrapper[4757]: I1006 15:09:54.203962 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-5168-account-create-f2vck" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.487649 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-tl5mh"] Oct 06 15:09:56 crc kubenswrapper[4757]: E1006 15:09:56.488575 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eafe25b-cbf4-46ea-9b52-4b8244574443" containerName="mariadb-account-create" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.488592 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eafe25b-cbf4-46ea-9b52-4b8244574443" containerName="mariadb-account-create" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.488768 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="3eafe25b-cbf4-46ea-9b52-4b8244574443" containerName="mariadb-account-create" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.489451 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.491376 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.491845 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-xmmqv" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.501701 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-tl5mh"] Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.598542 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-combined-ca-bundle\") pod \"barbican-db-sync-tl5mh\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.598655 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slb8r\" (UniqueName: \"kubernetes.io/projected/1d640aee-a2ac-4541-9a43-6db175d7ac71-kube-api-access-slb8r\") pod \"barbican-db-sync-tl5mh\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.598709 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-db-sync-config-data\") pod \"barbican-db-sync-tl5mh\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.700273 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-combined-ca-bundle\") pod \"barbican-db-sync-tl5mh\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.700730 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slb8r\" (UniqueName: \"kubernetes.io/projected/1d640aee-a2ac-4541-9a43-6db175d7ac71-kube-api-access-slb8r\") pod \"barbican-db-sync-tl5mh\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.700796 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-db-sync-config-data\") pod \"barbican-db-sync-tl5mh\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.709163 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-db-sync-config-data\") pod \"barbican-db-sync-tl5mh\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.712518 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-combined-ca-bundle\") pod \"barbican-db-sync-tl5mh\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.734710 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slb8r\" (UniqueName: \"kubernetes.io/projected/1d640aee-a2ac-4541-9a43-6db175d7ac71-kube-api-access-slb8r\") pod \"barbican-db-sync-tl5mh\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:56 crc kubenswrapper[4757]: I1006 15:09:56.806995 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:09:57 crc kubenswrapper[4757]: I1006 15:09:57.270119 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-tl5mh"] Oct 06 15:09:58 crc kubenswrapper[4757]: I1006 15:09:58.242421 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-tl5mh" event={"ID":"1d640aee-a2ac-4541-9a43-6db175d7ac71","Type":"ContainerStarted","Data":"6d022086a00e786b030c46c45b829918864c63bc7f168fba029e1f93129a7550"} Oct 06 15:09:58 crc kubenswrapper[4757]: I1006 15:09:58.242811 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-tl5mh" event={"ID":"1d640aee-a2ac-4541-9a43-6db175d7ac71","Type":"ContainerStarted","Data":"b618edb3a6e998a1accdf04b39f10a1935af95627d005fb0e21a062fb5e05ae6"} Oct 06 15:09:58 crc kubenswrapper[4757]: I1006 15:09:58.266302 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-tl5mh" podStartSLOduration=2.266282649 podStartE2EDuration="2.266282649s" podCreationTimestamp="2025-10-06 15:09:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:09:58.256365919 +0000 UTC m=+5486.753684466" watchObservedRunningTime="2025-10-06 15:09:58.266282649 +0000 UTC m=+5486.763601196" Oct 06 15:10:00 crc kubenswrapper[4757]: I1006 15:10:00.259658 4757 generic.go:334] "Generic (PLEG): container finished" podID="1d640aee-a2ac-4541-9a43-6db175d7ac71" containerID="6d022086a00e786b030c46c45b829918864c63bc7f168fba029e1f93129a7550" exitCode=0 Oct 06 15:10:00 crc kubenswrapper[4757]: I1006 15:10:00.259713 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-tl5mh" event={"ID":"1d640aee-a2ac-4541-9a43-6db175d7ac71","Type":"ContainerDied","Data":"6d022086a00e786b030c46c45b829918864c63bc7f168fba029e1f93129a7550"} Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.638516 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.799627 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-db-sync-config-data\") pod \"1d640aee-a2ac-4541-9a43-6db175d7ac71\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.799790 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slb8r\" (UniqueName: \"kubernetes.io/projected/1d640aee-a2ac-4541-9a43-6db175d7ac71-kube-api-access-slb8r\") pod \"1d640aee-a2ac-4541-9a43-6db175d7ac71\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.799822 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-combined-ca-bundle\") pod \"1d640aee-a2ac-4541-9a43-6db175d7ac71\" (UID: \"1d640aee-a2ac-4541-9a43-6db175d7ac71\") " Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.805910 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d640aee-a2ac-4541-9a43-6db175d7ac71-kube-api-access-slb8r" (OuterVolumeSpecName: "kube-api-access-slb8r") pod "1d640aee-a2ac-4541-9a43-6db175d7ac71" (UID: "1d640aee-a2ac-4541-9a43-6db175d7ac71"). InnerVolumeSpecName "kube-api-access-slb8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.805977 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1d640aee-a2ac-4541-9a43-6db175d7ac71" (UID: "1d640aee-a2ac-4541-9a43-6db175d7ac71"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.831757 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d640aee-a2ac-4541-9a43-6db175d7ac71" (UID: "1d640aee-a2ac-4541-9a43-6db175d7ac71"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.902452 4757 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.902507 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slb8r\" (UniqueName: \"kubernetes.io/projected/1d640aee-a2ac-4541-9a43-6db175d7ac71-kube-api-access-slb8r\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:01 crc kubenswrapper[4757]: I1006 15:10:01.902529 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d640aee-a2ac-4541-9a43-6db175d7ac71-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.275716 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-tl5mh" event={"ID":"1d640aee-a2ac-4541-9a43-6db175d7ac71","Type":"ContainerDied","Data":"b618edb3a6e998a1accdf04b39f10a1935af95627d005fb0e21a062fb5e05ae6"} Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.275759 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b618edb3a6e998a1accdf04b39f10a1935af95627d005fb0e21a062fb5e05ae6" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.275790 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-tl5mh" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.507553 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-8558c89d89-26z6j"] Oct 06 15:10:02 crc kubenswrapper[4757]: E1006 15:10:02.515685 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d640aee-a2ac-4541-9a43-6db175d7ac71" containerName="barbican-db-sync" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.515732 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d640aee-a2ac-4541-9a43-6db175d7ac71" containerName="barbican-db-sync" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.516025 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d640aee-a2ac-4541-9a43-6db175d7ac71" containerName="barbican-db-sync" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.517325 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.518697 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-76b567cf98-2ljlg"] Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.520079 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.523875 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-xmmqv" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.524089 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.524811 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.525045 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.525622 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-8558c89d89-26z6j"] Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.541923 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-76b567cf98-2ljlg"] Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.584117 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66ddf4b5cc-2n29n"] Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.585846 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.614819 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d48cb0f6-a038-4b42-8414-89fd43612859-config-data-custom\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.614906 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d48cb0f6-a038-4b42-8414-89fd43612859-combined-ca-bundle\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.614930 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/96d85f17-d1f2-4f91-a644-0959da17f29e-config-data-custom\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.614949 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96d85f17-d1f2-4f91-a644-0959da17f29e-logs\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.615018 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d48cb0f6-a038-4b42-8414-89fd43612859-config-data\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " 
pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.615037 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d85f17-d1f2-4f91-a644-0959da17f29e-config-data\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.615068 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5w6m\" (UniqueName: \"kubernetes.io/projected/96d85f17-d1f2-4f91-a644-0959da17f29e-kube-api-access-v5w6m\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.615162 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvc8w\" (UniqueName: \"kubernetes.io/projected/d48cb0f6-a038-4b42-8414-89fd43612859-kube-api-access-fvc8w\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.615182 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d85f17-d1f2-4f91-a644-0959da17f29e-combined-ca-bundle\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.615230 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d48cb0f6-a038-4b42-8414-89fd43612859-logs\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.634134 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66ddf4b5cc-2n29n"] Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.711374 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5fbd45848d-wkv66"] Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.713087 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.715185 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.720470 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5fbd45848d-wkv66"] Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724503 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5w6m\" (UniqueName: \"kubernetes.io/projected/96d85f17-d1f2-4f91-a644-0959da17f29e-kube-api-access-v5w6m\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724565 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-dns-svc\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724592 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvc8w\" (UniqueName: \"kubernetes.io/projected/d48cb0f6-a038-4b42-8414-89fd43612859-kube-api-access-fvc8w\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724618 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d85f17-d1f2-4f91-a644-0959da17f29e-combined-ca-bundle\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724710 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d48cb0f6-a038-4b42-8414-89fd43612859-logs\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724735 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-config\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724754 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfbzr\" (UniqueName: \"kubernetes.io/projected/5928ff8b-751a-411b-8705-9564341390c3-kube-api-access-wfbzr\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724809 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-nb\") pod 
\"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724832 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d48cb0f6-a038-4b42-8414-89fd43612859-config-data-custom\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724855 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d48cb0f6-a038-4b42-8414-89fd43612859-combined-ca-bundle\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724875 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/96d85f17-d1f2-4f91-a644-0959da17f29e-config-data-custom\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724897 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96d85f17-d1f2-4f91-a644-0959da17f29e-logs\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724925 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-sb\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.724992 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d48cb0f6-a038-4b42-8414-89fd43612859-config-data\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.725015 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d85f17-d1f2-4f91-a644-0959da17f29e-config-data\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.726306 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d48cb0f6-a038-4b42-8414-89fd43612859-logs\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.726586 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/96d85f17-d1f2-4f91-a644-0959da17f29e-logs\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.734863 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96d85f17-d1f2-4f91-a644-0959da17f29e-combined-ca-bundle\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.740289 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d48cb0f6-a038-4b42-8414-89fd43612859-config-data-custom\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.744437 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/96d85f17-d1f2-4f91-a644-0959da17f29e-config-data-custom\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.745374 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d48cb0f6-a038-4b42-8414-89fd43612859-config-data\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.750804 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5w6m\" (UniqueName: \"kubernetes.io/projected/96d85f17-d1f2-4f91-a644-0959da17f29e-kube-api-access-v5w6m\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.753147 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvc8w\" (UniqueName: \"kubernetes.io/projected/d48cb0f6-a038-4b42-8414-89fd43612859-kube-api-access-fvc8w\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.756252 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d48cb0f6-a038-4b42-8414-89fd43612859-combined-ca-bundle\") pod \"barbican-worker-8558c89d89-26z6j\" (UID: \"d48cb0f6-a038-4b42-8414-89fd43612859\") " pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.759954 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96d85f17-d1f2-4f91-a644-0959da17f29e-config-data\") pod \"barbican-keystone-listener-76b567cf98-2ljlg\" (UID: \"96d85f17-d1f2-4f91-a644-0959da17f29e\") " pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.826608 4757 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-sb\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.826694 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data-custom\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.826741 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.826794 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-dns-svc\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.826827 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hchs5\" (UniqueName: \"kubernetes.io/projected/27be7810-9fad-4980-b82f-ce3f6e77a913-kube-api-access-hchs5\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.826864 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-combined-ca-bundle\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.826920 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-config\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.826944 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfbzr\" (UniqueName: \"kubernetes.io/projected/5928ff8b-751a-411b-8705-9564341390c3-kube-api-access-wfbzr\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.826995 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27be7810-9fad-4980-b82f-ce3f6e77a913-logs\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc 
kubenswrapper[4757]: I1006 15:10:02.827031 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-nb\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.828361 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-config\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.828466 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-sb\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.828710 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-nb\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.829020 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-dns-svc\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.840118 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-8558c89d89-26z6j" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.844060 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfbzr\" (UniqueName: \"kubernetes.io/projected/5928ff8b-751a-411b-8705-9564341390c3-kube-api-access-wfbzr\") pod \"dnsmasq-dns-66ddf4b5cc-2n29n\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") " pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.857239 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.904759 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.928402 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data-custom\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.928456 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.928517 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hchs5\" (UniqueName: \"kubernetes.io/projected/27be7810-9fad-4980-b82f-ce3f6e77a913-kube-api-access-hchs5\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.928552 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-combined-ca-bundle\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.928662 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27be7810-9fad-4980-b82f-ce3f6e77a913-logs\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.929057 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27be7810-9fad-4980-b82f-ce3f6e77a913-logs\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.933911 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data-custom\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.937913 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-combined-ca-bundle\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc kubenswrapper[4757]: I1006 15:10:02.939262 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:02 crc 
kubenswrapper[4757]: I1006 15:10:02.948562 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hchs5\" (UniqueName: \"kubernetes.io/projected/27be7810-9fad-4980-b82f-ce3f6e77a913-kube-api-access-hchs5\") pod \"barbican-api-5fbd45848d-wkv66\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:03 crc kubenswrapper[4757]: I1006 15:10:03.042533 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:03 crc kubenswrapper[4757]: I1006 15:10:03.353558 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-8558c89d89-26z6j"] Oct 06 15:10:03 crc kubenswrapper[4757]: I1006 15:10:03.420485 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-76b567cf98-2ljlg"] Oct 06 15:10:03 crc kubenswrapper[4757]: I1006 15:10:03.493227 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66ddf4b5cc-2n29n"] Oct 06 15:10:03 crc kubenswrapper[4757]: I1006 15:10:03.611950 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5fbd45848d-wkv66"] Oct 06 15:10:03 crc kubenswrapper[4757]: W1006 15:10:03.622767 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27be7810_9fad_4980_b82f_ce3f6e77a913.slice/crio-a14f1a009fbf3ec24b75bba4fa0d070d30040b2b25b9649aa3d12ef248223910 WatchSource:0}: Error finding container a14f1a009fbf3ec24b75bba4fa0d070d30040b2b25b9649aa3d12ef248223910: Status 404 returned error can't find the container with id a14f1a009fbf3ec24b75bba4fa0d070d30040b2b25b9649aa3d12ef248223910 Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.290763 4757 generic.go:334] "Generic (PLEG): container finished" podID="5928ff8b-751a-411b-8705-9564341390c3" containerID="eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62" exitCode=0 Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.290920 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" event={"ID":"5928ff8b-751a-411b-8705-9564341390c3","Type":"ContainerDied","Data":"eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.291198 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" event={"ID":"5928ff8b-751a-411b-8705-9564341390c3","Type":"ContainerStarted","Data":"e1e17d027b0f05fcf37ae652efb05f16f59acf6d2a1d9404a8bdc262c8eadba9"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.293699 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5fbd45848d-wkv66" event={"ID":"27be7810-9fad-4980-b82f-ce3f6e77a913","Type":"ContainerStarted","Data":"edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.293747 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5fbd45848d-wkv66" event={"ID":"27be7810-9fad-4980-b82f-ce3f6e77a913","Type":"ContainerStarted","Data":"e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.293765 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5fbd45848d-wkv66" 
event={"ID":"27be7810-9fad-4980-b82f-ce3f6e77a913","Type":"ContainerStarted","Data":"a14f1a009fbf3ec24b75bba4fa0d070d30040b2b25b9649aa3d12ef248223910"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.293941 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.294063 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.296540 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-8558c89d89-26z6j" event={"ID":"d48cb0f6-a038-4b42-8414-89fd43612859","Type":"ContainerStarted","Data":"2ad13996cec872aaea5ff69115172d97c41424acdda43a93984776848733ebd4"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.296911 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-8558c89d89-26z6j" event={"ID":"d48cb0f6-a038-4b42-8414-89fd43612859","Type":"ContainerStarted","Data":"b585394c8befa08773576cd544ba593f81fb78643b733e35f30f6a5d3df747f6"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.296928 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-8558c89d89-26z6j" event={"ID":"d48cb0f6-a038-4b42-8414-89fd43612859","Type":"ContainerStarted","Data":"9d907cbf59ed5814c4bc3b887dee28265b0aa9f62ce9ca9071422b9310c62e58"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.298981 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" event={"ID":"96d85f17-d1f2-4f91-a644-0959da17f29e","Type":"ContainerStarted","Data":"bca8fef6c69eaec532e8e2c6627f91084d5d4031695f0dae21db39eeaa3eb3b3"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.299021 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" event={"ID":"96d85f17-d1f2-4f91-a644-0959da17f29e","Type":"ContainerStarted","Data":"d59c5422c2bcfc9c5a0874bf122bbdaf3b92fea93b9b7b01b41e50348d226cf2"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.299031 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" event={"ID":"96d85f17-d1f2-4f91-a644-0959da17f29e","Type":"ContainerStarted","Data":"a9c7469b32ca060286c16ff7dabaf84412b9ae55ea68aba7cd3f51b20ea8c8a8"} Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.344108 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-8558c89d89-26z6j" podStartSLOduration=2.344071215 podStartE2EDuration="2.344071215s" podCreationTimestamp="2025-10-06 15:10:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:10:04.336787159 +0000 UTC m=+5492.834105696" watchObservedRunningTime="2025-10-06 15:10:04.344071215 +0000 UTC m=+5492.841389752" Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.361593 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.361656 4757 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.361707 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.362325 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-76b567cf98-2ljlg" podStartSLOduration=2.362303405 podStartE2EDuration="2.362303405s" podCreationTimestamp="2025-10-06 15:10:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:10:04.358245493 +0000 UTC m=+5492.855564030" watchObservedRunningTime="2025-10-06 15:10:04.362303405 +0000 UTC m=+5492.859621952" Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.362476 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6ada3739de20b08be50b8e188471c034ef69b5ce1559383ec174465e8517cd1b"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.362546 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://6ada3739de20b08be50b8e188471c034ef69b5ce1559383ec174465e8517cd1b" gracePeriod=600 Oct 06 15:10:04 crc kubenswrapper[4757]: I1006 15:10:04.398727 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5fbd45848d-wkv66" podStartSLOduration=2.398702012 podStartE2EDuration="2.398702012s" podCreationTimestamp="2025-10-06 15:10:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:10:04.381187165 +0000 UTC m=+5492.878505712" watchObservedRunningTime="2025-10-06 15:10:04.398702012 +0000 UTC m=+5492.896020559" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.307773 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="6ada3739de20b08be50b8e188471c034ef69b5ce1559383ec174465e8517cd1b" exitCode=0 Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.307850 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"6ada3739de20b08be50b8e188471c034ef69b5ce1559383ec174465e8517cd1b"} Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.308407 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"} Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.308428 4757 scope.go:117] "RemoveContainer" 
containerID="5c4563212ce072d71c658e00c8096ab0ddad498c69fa13e6bee8d2a76ec2f125" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.310489 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" event={"ID":"5928ff8b-751a-411b-8705-9564341390c3","Type":"ContainerStarted","Data":"c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231"} Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.339289 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" podStartSLOduration=3.339272715 podStartE2EDuration="3.339272715s" podCreationTimestamp="2025-10-06 15:10:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:10:05.338596362 +0000 UTC m=+5493.835914889" watchObservedRunningTime="2025-10-06 15:10:05.339272715 +0000 UTC m=+5493.836591252" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.646058 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-c9b794d8d-fv647"] Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.647929 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.653514 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.653611 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.659651 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-c9b794d8d-fv647"] Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.804296 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-internal-tls-certs\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.804384 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chkbx\" (UniqueName: \"kubernetes.io/projected/1e2f444f-a0a5-4204-92d9-e253d6c676a6-kube-api-access-chkbx\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.804407 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-config-data\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.804450 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-combined-ca-bundle\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.804473 4757 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-config-data-custom\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.804562 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e2f444f-a0a5-4204-92d9-e253d6c676a6-logs\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.804614 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-public-tls-certs\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.906321 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chkbx\" (UniqueName: \"kubernetes.io/projected/1e2f444f-a0a5-4204-92d9-e253d6c676a6-kube-api-access-chkbx\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.906373 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-config-data\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.906419 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-combined-ca-bundle\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.906448 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-config-data-custom\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.906478 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e2f444f-a0a5-4204-92d9-e253d6c676a6-logs\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.906498 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-public-tls-certs\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.906572 4757 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-internal-tls-certs\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.906938 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e2f444f-a0a5-4204-92d9-e253d6c676a6-logs\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.912512 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-public-tls-certs\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.913505 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-config-data-custom\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.913565 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-combined-ca-bundle\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.914122 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-config-data\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.915385 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e2f444f-a0a5-4204-92d9-e253d6c676a6-internal-tls-certs\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.932368 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chkbx\" (UniqueName: \"kubernetes.io/projected/1e2f444f-a0a5-4204-92d9-e253d6c676a6-kube-api-access-chkbx\") pod \"barbican-api-c9b794d8d-fv647\" (UID: \"1e2f444f-a0a5-4204-92d9-e253d6c676a6\") " pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:05 crc kubenswrapper[4757]: I1006 15:10:05.969010 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:06 crc kubenswrapper[4757]: I1006 15:10:06.321477 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:06 crc kubenswrapper[4757]: I1006 15:10:06.459062 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-c9b794d8d-fv647"] Oct 06 15:10:06 crc kubenswrapper[4757]: W1006 15:10:06.468649 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e2f444f_a0a5_4204_92d9_e253d6c676a6.slice/crio-7ced54915d2aa4bfbab1cef97a0cb67824c825b9d51beff3228783dd72ef7e89 WatchSource:0}: Error finding container 7ced54915d2aa4bfbab1cef97a0cb67824c825b9d51beff3228783dd72ef7e89: Status 404 returned error can't find the container with id 7ced54915d2aa4bfbab1cef97a0cb67824c825b9d51beff3228783dd72ef7e89 Oct 06 15:10:07 crc kubenswrapper[4757]: I1006 15:10:07.336397 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c9b794d8d-fv647" event={"ID":"1e2f444f-a0a5-4204-92d9-e253d6c676a6","Type":"ContainerStarted","Data":"014b4f3948cc5b6b1eae04b03b2d4f125e32709aaf91aff47c7684904dad269e"} Oct 06 15:10:07 crc kubenswrapper[4757]: I1006 15:10:07.336894 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c9b794d8d-fv647" event={"ID":"1e2f444f-a0a5-4204-92d9-e253d6c676a6","Type":"ContainerStarted","Data":"f0e81bc378efb03946df23fd31fa7399b4dd7be2918cec1a820e23cb8edd959c"} Oct 06 15:10:07 crc kubenswrapper[4757]: I1006 15:10:07.336955 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-c9b794d8d-fv647" event={"ID":"1e2f444f-a0a5-4204-92d9-e253d6c676a6","Type":"ContainerStarted","Data":"7ced54915d2aa4bfbab1cef97a0cb67824c825b9d51beff3228783dd72ef7e89"} Oct 06 15:10:07 crc kubenswrapper[4757]: I1006 15:10:07.365368 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-c9b794d8d-fv647" podStartSLOduration=2.3653312570000002 podStartE2EDuration="2.365331257s" podCreationTimestamp="2025-10-06 15:10:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:10:07.355418057 +0000 UTC m=+5495.852736684" watchObservedRunningTime="2025-10-06 15:10:07.365331257 +0000 UTC m=+5495.862649834" Oct 06 15:10:08 crc kubenswrapper[4757]: I1006 15:10:08.348593 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:08 crc kubenswrapper[4757]: I1006 15:10:08.348804 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:12 crc kubenswrapper[4757]: I1006 15:10:12.906255 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" Oct 06 15:10:12 crc kubenswrapper[4757]: I1006 15:10:12.974525 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf9d54b65-rdpsv"] Oct 06 15:10:12 crc kubenswrapper[4757]: I1006 15:10:12.974960 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" podUID="8c99d116-9378-471e-a119-1a468f151a77" containerName="dnsmasq-dns" containerID="cri-o://f147cf0e982910f783624667339a228fb81806a50f34a3b26070524947194816" gracePeriod=10 Oct 06 15:10:13 crc 
kubenswrapper[4757]: I1006 15:10:13.153304 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" podUID="8c99d116-9378-471e-a119-1a468f151a77" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.22:5353: connect: connection refused" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.408646 4757 generic.go:334] "Generic (PLEG): container finished" podID="8c99d116-9378-471e-a119-1a468f151a77" containerID="f147cf0e982910f783624667339a228fb81806a50f34a3b26070524947194816" exitCode=0 Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.408695 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" event={"ID":"8c99d116-9378-471e-a119-1a468f151a77","Type":"ContainerDied","Data":"f147cf0e982910f783624667339a228fb81806a50f34a3b26070524947194816"} Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.478067 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.663807 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-config\") pod \"8c99d116-9378-471e-a119-1a468f151a77\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.663896 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-sb\") pod \"8c99d116-9378-471e-a119-1a468f151a77\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.663948 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-nb\") pod \"8c99d116-9378-471e-a119-1a468f151a77\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.664031 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-dns-svc\") pod \"8c99d116-9378-471e-a119-1a468f151a77\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.664159 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4php\" (UniqueName: \"kubernetes.io/projected/8c99d116-9378-471e-a119-1a468f151a77-kube-api-access-p4php\") pod \"8c99d116-9378-471e-a119-1a468f151a77\" (UID: \"8c99d116-9378-471e-a119-1a468f151a77\") " Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.673335 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c99d116-9378-471e-a119-1a468f151a77-kube-api-access-p4php" (OuterVolumeSpecName: "kube-api-access-p4php") pod "8c99d116-9378-471e-a119-1a468f151a77" (UID: "8c99d116-9378-471e-a119-1a468f151a77"). InnerVolumeSpecName "kube-api-access-p4php". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.710205 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8c99d116-9378-471e-a119-1a468f151a77" (UID: "8c99d116-9378-471e-a119-1a468f151a77"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.719382 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8c99d116-9378-471e-a119-1a468f151a77" (UID: "8c99d116-9378-471e-a119-1a468f151a77"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.725243 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-config" (OuterVolumeSpecName: "config") pod "8c99d116-9378-471e-a119-1a468f151a77" (UID: "8c99d116-9378-471e-a119-1a468f151a77"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.731565 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8c99d116-9378-471e-a119-1a468f151a77" (UID: "8c99d116-9378-471e-a119-1a468f151a77"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.766118 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.766155 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4php\" (UniqueName: \"kubernetes.io/projected/8c99d116-9378-471e-a119-1a468f151a77-kube-api-access-p4php\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.766167 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-config\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.766177 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:13 crc kubenswrapper[4757]: I1006 15:10:13.766187 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c99d116-9378-471e-a119-1a468f151a77-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:14 crc kubenswrapper[4757]: I1006 15:10:14.421907 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" event={"ID":"8c99d116-9378-471e-a119-1a468f151a77","Type":"ContainerDied","Data":"86f73fb3b4fbeb456a6f34a939b54c0501ffc33bfe42af4f235f22e9050462a3"} Oct 06 15:10:14 crc kubenswrapper[4757]: I1006 15:10:14.422303 4757 scope.go:117] "RemoveContainer" 
containerID="f147cf0e982910f783624667339a228fb81806a50f34a3b26070524947194816" Oct 06 15:10:14 crc kubenswrapper[4757]: I1006 15:10:14.422475 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf9d54b65-rdpsv" Oct 06 15:10:14 crc kubenswrapper[4757]: I1006 15:10:14.458737 4757 scope.go:117] "RemoveContainer" containerID="41a0f93b8e39ea17f7827f0f1cac766f35defa977ed4b48d1cf41ddf11d0b91d" Oct 06 15:10:14 crc kubenswrapper[4757]: I1006 15:10:14.461651 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf9d54b65-rdpsv"] Oct 06 15:10:14 crc kubenswrapper[4757]: I1006 15:10:14.476744 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cf9d54b65-rdpsv"] Oct 06 15:10:14 crc kubenswrapper[4757]: I1006 15:10:14.628779 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:14 crc kubenswrapper[4757]: I1006 15:10:14.754951 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:16 crc kubenswrapper[4757]: I1006 15:10:16.190154 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c99d116-9378-471e-a119-1a468f151a77" path="/var/lib/kubelet/pods/8c99d116-9378-471e-a119-1a468f151a77/volumes" Oct 06 15:10:17 crc kubenswrapper[4757]: I1006 15:10:17.393787 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:17 crc kubenswrapper[4757]: I1006 15:10:17.449308 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-c9b794d8d-fv647" Oct 06 15:10:17 crc kubenswrapper[4757]: I1006 15:10:17.501425 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5fbd45848d-wkv66"] Oct 06 15:10:17 crc kubenswrapper[4757]: I1006 15:10:17.502428 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5fbd45848d-wkv66" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api-log" containerID="cri-o://e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356" gracePeriod=30 Oct 06 15:10:17 crc kubenswrapper[4757]: I1006 15:10:17.502508 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5fbd45848d-wkv66" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api" containerID="cri-o://edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0" gracePeriod=30 Oct 06 15:10:18 crc kubenswrapper[4757]: I1006 15:10:18.463864 4757 generic.go:334] "Generic (PLEG): container finished" podID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerID="e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356" exitCode=143 Oct 06 15:10:18 crc kubenswrapper[4757]: I1006 15:10:18.464017 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5fbd45848d-wkv66" event={"ID":"27be7810-9fad-4980-b82f-ce3f6e77a913","Type":"ContainerDied","Data":"e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356"} Oct 06 15:10:20 crc kubenswrapper[4757]: I1006 15:10:20.669137 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5fbd45848d-wkv66" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.1.34:9311/healthcheck\": read tcp 
10.217.0.2:33294->10.217.1.34:9311: read: connection reset by peer" Oct 06 15:10:20 crc kubenswrapper[4757]: I1006 15:10:20.669217 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5fbd45848d-wkv66" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.1.34:9311/healthcheck\": read tcp 10.217.0.2:33304->10.217.1.34:9311: read: connection reset by peer" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.055466 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.204304 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27be7810-9fad-4980-b82f-ce3f6e77a913-logs\") pod \"27be7810-9fad-4980-b82f-ce3f6e77a913\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.204394 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data\") pod \"27be7810-9fad-4980-b82f-ce3f6e77a913\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.204517 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data-custom\") pod \"27be7810-9fad-4980-b82f-ce3f6e77a913\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.204680 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-combined-ca-bundle\") pod \"27be7810-9fad-4980-b82f-ce3f6e77a913\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.204804 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27be7810-9fad-4980-b82f-ce3f6e77a913-logs" (OuterVolumeSpecName: "logs") pod "27be7810-9fad-4980-b82f-ce3f6e77a913" (UID: "27be7810-9fad-4980-b82f-ce3f6e77a913"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.204826 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hchs5\" (UniqueName: \"kubernetes.io/projected/27be7810-9fad-4980-b82f-ce3f6e77a913-kube-api-access-hchs5\") pod \"27be7810-9fad-4980-b82f-ce3f6e77a913\" (UID: \"27be7810-9fad-4980-b82f-ce3f6e77a913\") " Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.205714 4757 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/27be7810-9fad-4980-b82f-ce3f6e77a913-logs\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.211810 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27be7810-9fad-4980-b82f-ce3f6e77a913-kube-api-access-hchs5" (OuterVolumeSpecName: "kube-api-access-hchs5") pod "27be7810-9fad-4980-b82f-ce3f6e77a913" (UID: "27be7810-9fad-4980-b82f-ce3f6e77a913"). InnerVolumeSpecName "kube-api-access-hchs5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.212273 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "27be7810-9fad-4980-b82f-ce3f6e77a913" (UID: "27be7810-9fad-4980-b82f-ce3f6e77a913"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.232639 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27be7810-9fad-4980-b82f-ce3f6e77a913" (UID: "27be7810-9fad-4980-b82f-ce3f6e77a913"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.253515 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data" (OuterVolumeSpecName: "config-data") pod "27be7810-9fad-4980-b82f-ce3f6e77a913" (UID: "27be7810-9fad-4980-b82f-ce3f6e77a913"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.309003 4757 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.309028 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.309039 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hchs5\" (UniqueName: \"kubernetes.io/projected/27be7810-9fad-4980-b82f-ce3f6e77a913-kube-api-access-hchs5\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.309051 4757 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27be7810-9fad-4980-b82f-ce3f6e77a913-config-data\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.497139 4757 generic.go:334] "Generic (PLEG): container finished" podID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerID="edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0" exitCode=0 Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.497223 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5fbd45848d-wkv66" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.497230 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5fbd45848d-wkv66" event={"ID":"27be7810-9fad-4980-b82f-ce3f6e77a913","Type":"ContainerDied","Data":"edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0"} Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.497888 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5fbd45848d-wkv66" event={"ID":"27be7810-9fad-4980-b82f-ce3f6e77a913","Type":"ContainerDied","Data":"a14f1a009fbf3ec24b75bba4fa0d070d30040b2b25b9649aa3d12ef248223910"} Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.497928 4757 scope.go:117] "RemoveContainer" containerID="edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.529038 4757 scope.go:117] "RemoveContainer" containerID="e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.546582 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5fbd45848d-wkv66"] Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.554684 4757 scope.go:117] "RemoveContainer" containerID="edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0" Oct 06 15:10:21 crc kubenswrapper[4757]: E1006 15:10:21.556027 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0\": container with ID starting with edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0 not found: ID does not exist" containerID="edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.556082 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0"} err="failed to get container status \"edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0\": rpc error: code = NotFound desc = could not find container \"edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0\": container with ID starting with edc81fef501409be291a17ad2344a67982595b65c17ad687ab9b68b2b25343a0 not found: ID does not exist" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.556124 4757 scope.go:117] "RemoveContainer" containerID="e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356" Oct 06 15:10:21 crc kubenswrapper[4757]: E1006 15:10:21.556556 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356\": container with ID starting with e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356 not found: ID does not exist" containerID="e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.556589 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356"} err="failed to get container status \"e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356\": rpc error: code = NotFound desc = could not find container 
\"e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356\": container with ID starting with e3b150aa863dd1c9fbea677efdf76ba62cbb34ee2a7aee8abbbd6e9ee1262356 not found: ID does not exist" Oct 06 15:10:21 crc kubenswrapper[4757]: I1006 15:10:21.556719 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5fbd45848d-wkv66"] Oct 06 15:10:22 crc kubenswrapper[4757]: I1006 15:10:22.198522 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" path="/var/lib/kubelet/pods/27be7810-9fad-4980-b82f-ce3f6e77a913/volumes" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.690313 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-rvnm9"] Oct 06 15:10:38 crc kubenswrapper[4757]: E1006 15:10:38.693266 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api-log" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.693288 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api-log" Oct 06 15:10:38 crc kubenswrapper[4757]: E1006 15:10:38.693310 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c99d116-9378-471e-a119-1a468f151a77" containerName="dnsmasq-dns" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.693317 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c99d116-9378-471e-a119-1a468f151a77" containerName="dnsmasq-dns" Oct 06 15:10:38 crc kubenswrapper[4757]: E1006 15:10:38.693341 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.693347 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api" Oct 06 15:10:38 crc kubenswrapper[4757]: E1006 15:10:38.693363 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c99d116-9378-471e-a119-1a468f151a77" containerName="init" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.693368 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c99d116-9378-471e-a119-1a468f151a77" containerName="init" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.693548 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.693565 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="27be7810-9fad-4980-b82f-ce3f6e77a913" containerName="barbican-api-log" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.693580 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c99d116-9378-471e-a119-1a468f151a77" containerName="dnsmasq-dns" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.694160 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-rvnm9" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.702918 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rvnm9"] Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.751833 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jkcs\" (UniqueName: \"kubernetes.io/projected/7fa8484b-cc8e-4b19-a476-d3cec87675a3-kube-api-access-9jkcs\") pod \"neutron-db-create-rvnm9\" (UID: \"7fa8484b-cc8e-4b19-a476-d3cec87675a3\") " pod="openstack/neutron-db-create-rvnm9" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.853269 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jkcs\" (UniqueName: \"kubernetes.io/projected/7fa8484b-cc8e-4b19-a476-d3cec87675a3-kube-api-access-9jkcs\") pod \"neutron-db-create-rvnm9\" (UID: \"7fa8484b-cc8e-4b19-a476-d3cec87675a3\") " pod="openstack/neutron-db-create-rvnm9" Oct 06 15:10:38 crc kubenswrapper[4757]: I1006 15:10:38.879863 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jkcs\" (UniqueName: \"kubernetes.io/projected/7fa8484b-cc8e-4b19-a476-d3cec87675a3-kube-api-access-9jkcs\") pod \"neutron-db-create-rvnm9\" (UID: \"7fa8484b-cc8e-4b19-a476-d3cec87675a3\") " pod="openstack/neutron-db-create-rvnm9" Oct 06 15:10:39 crc kubenswrapper[4757]: I1006 15:10:39.015553 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rvnm9" Oct 06 15:10:39 crc kubenswrapper[4757]: I1006 15:10:39.579638 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-rvnm9"] Oct 06 15:10:39 crc kubenswrapper[4757]: I1006 15:10:39.652878 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rvnm9" event={"ID":"7fa8484b-cc8e-4b19-a476-d3cec87675a3","Type":"ContainerStarted","Data":"3d0be1d2e2c55f95810267148dee739c1545298933bdd74aadcac3de08b85190"} Oct 06 15:10:40 crc kubenswrapper[4757]: I1006 15:10:40.664798 4757 generic.go:334] "Generic (PLEG): container finished" podID="7fa8484b-cc8e-4b19-a476-d3cec87675a3" containerID="151053d9dca5ae5a8789ddf4352ce72cae0e60b5e789f395a6ca58e076db6513" exitCode=0 Oct 06 15:10:40 crc kubenswrapper[4757]: I1006 15:10:40.664840 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rvnm9" event={"ID":"7fa8484b-cc8e-4b19-a476-d3cec87675a3","Type":"ContainerDied","Data":"151053d9dca5ae5a8789ddf4352ce72cae0e60b5e789f395a6ca58e076db6513"} Oct 06 15:10:42 crc kubenswrapper[4757]: I1006 15:10:42.063925 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rvnm9" Oct 06 15:10:42 crc kubenswrapper[4757]: I1006 15:10:42.226035 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jkcs\" (UniqueName: \"kubernetes.io/projected/7fa8484b-cc8e-4b19-a476-d3cec87675a3-kube-api-access-9jkcs\") pod \"7fa8484b-cc8e-4b19-a476-d3cec87675a3\" (UID: \"7fa8484b-cc8e-4b19-a476-d3cec87675a3\") " Oct 06 15:10:42 crc kubenswrapper[4757]: I1006 15:10:42.233381 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fa8484b-cc8e-4b19-a476-d3cec87675a3-kube-api-access-9jkcs" (OuterVolumeSpecName: "kube-api-access-9jkcs") pod "7fa8484b-cc8e-4b19-a476-d3cec87675a3" (UID: "7fa8484b-cc8e-4b19-a476-d3cec87675a3"). 
InnerVolumeSpecName "kube-api-access-9jkcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:10:42 crc kubenswrapper[4757]: I1006 15:10:42.234579 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jkcs\" (UniqueName: \"kubernetes.io/projected/7fa8484b-cc8e-4b19-a476-d3cec87675a3-kube-api-access-9jkcs\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:42 crc kubenswrapper[4757]: I1006 15:10:42.691439 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-rvnm9" event={"ID":"7fa8484b-cc8e-4b19-a476-d3cec87675a3","Type":"ContainerDied","Data":"3d0be1d2e2c55f95810267148dee739c1545298933bdd74aadcac3de08b85190"} Oct 06 15:10:42 crc kubenswrapper[4757]: I1006 15:10:42.691505 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d0be1d2e2c55f95810267148dee739c1545298933bdd74aadcac3de08b85190" Oct 06 15:10:42 crc kubenswrapper[4757]: I1006 15:10:42.691528 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-rvnm9" Oct 06 15:10:48 crc kubenswrapper[4757]: I1006 15:10:48.815627 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-528f-account-create-xkcdt"] Oct 06 15:10:48 crc kubenswrapper[4757]: E1006 15:10:48.816974 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fa8484b-cc8e-4b19-a476-d3cec87675a3" containerName="mariadb-database-create" Oct 06 15:10:48 crc kubenswrapper[4757]: I1006 15:10:48.816994 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fa8484b-cc8e-4b19-a476-d3cec87675a3" containerName="mariadb-database-create" Oct 06 15:10:48 crc kubenswrapper[4757]: I1006 15:10:48.817220 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fa8484b-cc8e-4b19-a476-d3cec87675a3" containerName="mariadb-database-create" Oct 06 15:10:48 crc kubenswrapper[4757]: I1006 15:10:48.820343 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-528f-account-create-xkcdt" Oct 06 15:10:48 crc kubenswrapper[4757]: I1006 15:10:48.826313 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-528f-account-create-xkcdt"] Oct 06 15:10:48 crc kubenswrapper[4757]: I1006 15:10:48.827314 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 06 15:10:48 crc kubenswrapper[4757]: I1006 15:10:48.958080 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vks9\" (UniqueName: \"kubernetes.io/projected/95694796-073a-422c-83d8-97a9a5b7e000-kube-api-access-2vks9\") pod \"neutron-528f-account-create-xkcdt\" (UID: \"95694796-073a-422c-83d8-97a9a5b7e000\") " pod="openstack/neutron-528f-account-create-xkcdt" Oct 06 15:10:49 crc kubenswrapper[4757]: I1006 15:10:49.060527 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vks9\" (UniqueName: \"kubernetes.io/projected/95694796-073a-422c-83d8-97a9a5b7e000-kube-api-access-2vks9\") pod \"neutron-528f-account-create-xkcdt\" (UID: \"95694796-073a-422c-83d8-97a9a5b7e000\") " pod="openstack/neutron-528f-account-create-xkcdt" Oct 06 15:10:49 crc kubenswrapper[4757]: I1006 15:10:49.086922 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vks9\" (UniqueName: \"kubernetes.io/projected/95694796-073a-422c-83d8-97a9a5b7e000-kube-api-access-2vks9\") pod \"neutron-528f-account-create-xkcdt\" (UID: \"95694796-073a-422c-83d8-97a9a5b7e000\") " pod="openstack/neutron-528f-account-create-xkcdt" Oct 06 15:10:49 crc kubenswrapper[4757]: I1006 15:10:49.142799 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-528f-account-create-xkcdt" Oct 06 15:10:49 crc kubenswrapper[4757]: I1006 15:10:49.429174 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-528f-account-create-xkcdt"] Oct 06 15:10:49 crc kubenswrapper[4757]: I1006 15:10:49.763064 4757 generic.go:334] "Generic (PLEG): container finished" podID="95694796-073a-422c-83d8-97a9a5b7e000" containerID="3e3786e9d82f7fe79a54165d65f21d57aa16e62dcc4280989ca50f051b666f90" exitCode=0 Oct 06 15:10:49 crc kubenswrapper[4757]: I1006 15:10:49.763152 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-528f-account-create-xkcdt" event={"ID":"95694796-073a-422c-83d8-97a9a5b7e000","Type":"ContainerDied","Data":"3e3786e9d82f7fe79a54165d65f21d57aa16e62dcc4280989ca50f051b666f90"} Oct 06 15:10:49 crc kubenswrapper[4757]: I1006 15:10:49.763228 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-528f-account-create-xkcdt" event={"ID":"95694796-073a-422c-83d8-97a9a5b7e000","Type":"ContainerStarted","Data":"036f5c6d380019266029da082b672a0a66b57ddf69bb6e6be6501a6a28766c28"} Oct 06 15:10:51 crc kubenswrapper[4757]: I1006 15:10:51.083618 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-528f-account-create-xkcdt" Oct 06 15:10:51 crc kubenswrapper[4757]: I1006 15:10:51.200064 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vks9\" (UniqueName: \"kubernetes.io/projected/95694796-073a-422c-83d8-97a9a5b7e000-kube-api-access-2vks9\") pod \"95694796-073a-422c-83d8-97a9a5b7e000\" (UID: \"95694796-073a-422c-83d8-97a9a5b7e000\") " Oct 06 15:10:51 crc kubenswrapper[4757]: I1006 15:10:51.211363 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95694796-073a-422c-83d8-97a9a5b7e000-kube-api-access-2vks9" (OuterVolumeSpecName: "kube-api-access-2vks9") pod "95694796-073a-422c-83d8-97a9a5b7e000" (UID: "95694796-073a-422c-83d8-97a9a5b7e000"). InnerVolumeSpecName "kube-api-access-2vks9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:10:51 crc kubenswrapper[4757]: I1006 15:10:51.302504 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vks9\" (UniqueName: \"kubernetes.io/projected/95694796-073a-422c-83d8-97a9a5b7e000-kube-api-access-2vks9\") on node \"crc\" DevicePath \"\"" Oct 06 15:10:51 crc kubenswrapper[4757]: I1006 15:10:51.782698 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-528f-account-create-xkcdt" event={"ID":"95694796-073a-422c-83d8-97a9a5b7e000","Type":"ContainerDied","Data":"036f5c6d380019266029da082b672a0a66b57ddf69bb6e6be6501a6a28766c28"} Oct 06 15:10:51 crc kubenswrapper[4757]: I1006 15:10:51.782780 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="036f5c6d380019266029da082b672a0a66b57ddf69bb6e6be6501a6a28766c28" Oct 06 15:10:51 crc kubenswrapper[4757]: I1006 15:10:51.782748 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-528f-account-create-xkcdt" Oct 06 15:10:53 crc kubenswrapper[4757]: I1006 15:10:53.979497 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-n59jv"] Oct 06 15:10:53 crc kubenswrapper[4757]: E1006 15:10:53.980305 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95694796-073a-422c-83d8-97a9a5b7e000" containerName="mariadb-account-create" Oct 06 15:10:53 crc kubenswrapper[4757]: I1006 15:10:53.980327 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="95694796-073a-422c-83d8-97a9a5b7e000" containerName="mariadb-account-create" Oct 06 15:10:53 crc kubenswrapper[4757]: I1006 15:10:53.980589 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="95694796-073a-422c-83d8-97a9a5b7e000" containerName="mariadb-account-create" Oct 06 15:10:53 crc kubenswrapper[4757]: I1006 15:10:53.981496 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:53 crc kubenswrapper[4757]: I1006 15:10:53.985706 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 06 15:10:53 crc kubenswrapper[4757]: I1006 15:10:53.985886 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-f78nq" Oct 06 15:10:53 crc kubenswrapper[4757]: I1006 15:10:53.985942 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 06 15:10:53 crc kubenswrapper[4757]: I1006 15:10:53.995046 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-n59jv"] Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.047840 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-combined-ca-bundle\") pod \"neutron-db-sync-n59jv\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.047903 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg4sn\" (UniqueName: \"kubernetes.io/projected/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-kube-api-access-dg4sn\") pod \"neutron-db-sync-n59jv\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.048035 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-config\") pod \"neutron-db-sync-n59jv\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.150317 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-combined-ca-bundle\") pod \"neutron-db-sync-n59jv\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.150418 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg4sn\" (UniqueName: \"kubernetes.io/projected/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-kube-api-access-dg4sn\") pod \"neutron-db-sync-n59jv\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.150603 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-config\") pod \"neutron-db-sync-n59jv\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.160991 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-config\") pod \"neutron-db-sync-n59jv\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.165642 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-combined-ca-bundle\") pod \"neutron-db-sync-n59jv\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.180378 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg4sn\" (UniqueName: \"kubernetes.io/projected/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-kube-api-access-dg4sn\") pod \"neutron-db-sync-n59jv\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.337686 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-n59jv" Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.776617 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-n59jv"] Oct 06 15:10:54 crc kubenswrapper[4757]: I1006 15:10:54.808907 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-n59jv" event={"ID":"e9b02124-fbb0-4055-9eb0-7e7f39db93e3","Type":"ContainerStarted","Data":"f08b1f8f245f0c654dca96ab556231a566287fcaf87ce57172f37493b6041d64"} Oct 06 15:10:55 crc kubenswrapper[4757]: I1006 15:10:55.818626 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-n59jv" event={"ID":"e9b02124-fbb0-4055-9eb0-7e7f39db93e3","Type":"ContainerStarted","Data":"c4a6e19220ec5963bfd0912f590d02c865039ff416f71869646b29c65f9a2297"} Oct 06 15:10:55 crc kubenswrapper[4757]: I1006 15:10:55.839620 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-n59jv" podStartSLOduration=2.839596781 podStartE2EDuration="2.839596781s" podCreationTimestamp="2025-10-06 15:10:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:10:55.833183924 +0000 UTC m=+5544.330502471" watchObservedRunningTime="2025-10-06 15:10:55.839596781 +0000 UTC m=+5544.336915318" Oct 06 15:10:59 crc kubenswrapper[4757]: I1006 15:10:59.860560 4757 generic.go:334] "Generic (PLEG): container finished" podID="e9b02124-fbb0-4055-9eb0-7e7f39db93e3" containerID="c4a6e19220ec5963bfd0912f590d02c865039ff416f71869646b29c65f9a2297" exitCode=0 Oct 06 15:10:59 crc kubenswrapper[4757]: I1006 15:10:59.860633 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-n59jv" event={"ID":"e9b02124-fbb0-4055-9eb0-7e7f39db93e3","Type":"ContainerDied","Data":"c4a6e19220ec5963bfd0912f590d02c865039ff416f71869646b29c65f9a2297"} Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.198304 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-n59jv" Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.278696 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-combined-ca-bundle\") pod \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.278889 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-config\") pod \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.278921 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dg4sn\" (UniqueName: \"kubernetes.io/projected/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-kube-api-access-dg4sn\") pod \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\" (UID: \"e9b02124-fbb0-4055-9eb0-7e7f39db93e3\") " Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.287557 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-kube-api-access-dg4sn" (OuterVolumeSpecName: "kube-api-access-dg4sn") pod "e9b02124-fbb0-4055-9eb0-7e7f39db93e3" (UID: "e9b02124-fbb0-4055-9eb0-7e7f39db93e3"). InnerVolumeSpecName "kube-api-access-dg4sn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.316693 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9b02124-fbb0-4055-9eb0-7e7f39db93e3" (UID: "e9b02124-fbb0-4055-9eb0-7e7f39db93e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.320649 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-config" (OuterVolumeSpecName: "config") pod "e9b02124-fbb0-4055-9eb0-7e7f39db93e3" (UID: "e9b02124-fbb0-4055-9eb0-7e7f39db93e3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.381506 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-config\") on node \"crc\" DevicePath \"\"" Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.381538 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dg4sn\" (UniqueName: \"kubernetes.io/projected/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-kube-api-access-dg4sn\") on node \"crc\" DevicePath \"\"" Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.381552 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9b02124-fbb0-4055-9eb0-7e7f39db93e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.878429 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-n59jv" event={"ID":"e9b02124-fbb0-4055-9eb0-7e7f39db93e3","Type":"ContainerDied","Data":"f08b1f8f245f0c654dca96ab556231a566287fcaf87ce57172f37493b6041d64"} Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.878487 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f08b1f8f245f0c654dca96ab556231a566287fcaf87ce57172f37493b6041d64" Oct 06 15:11:01 crc kubenswrapper[4757]: I1006 15:11:01.878496 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-n59jv" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.029236 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bb5b457f7-dd6rq"] Oct 06 15:11:02 crc kubenswrapper[4757]: E1006 15:11:02.042847 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9b02124-fbb0-4055-9eb0-7e7f39db93e3" containerName="neutron-db-sync" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.042881 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9b02124-fbb0-4055-9eb0-7e7f39db93e3" containerName="neutron-db-sync" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.043117 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9b02124-fbb0-4055-9eb0-7e7f39db93e3" containerName="neutron-db-sync" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.046672 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bb5b457f7-dd6rq"] Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.047010 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.092958 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-dns-svc\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.093015 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-config\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.093058 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-ovsdbserver-sb\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.093137 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-ovsdbserver-nb\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.093213 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9jrm\" (UniqueName: \"kubernetes.io/projected/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-kube-api-access-d9jrm\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.141604 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7dc5fcd74d-zllrm"] Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.143548 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.148587 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.148774 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-f78nq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.150335 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.150435 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.160217 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7dc5fcd74d-zllrm"] Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.194527 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-ovsdbserver-nb\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.194637 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9jrm\" (UniqueName: \"kubernetes.io/projected/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-kube-api-access-d9jrm\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.194695 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8j2b\" (UniqueName: \"kubernetes.io/projected/9361ecc9-790e-45fc-b84d-7786f69f0929-kube-api-access-x8j2b\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.194761 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-ovndb-tls-certs\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.195132 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-combined-ca-bundle\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.195178 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-config\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.195238 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-dns-svc\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" 
(UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.195263 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-config\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.195290 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-httpd-config\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.195324 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-ovsdbserver-sb\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.196594 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-ovsdbserver-nb\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.197025 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-config\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.197780 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-dns-svc\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.197933 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-ovsdbserver-sb\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.218880 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9jrm\" (UniqueName: \"kubernetes.io/projected/2c3c9933-3b11-4fc8-a1a2-ef87ba376118-kube-api-access-d9jrm\") pod \"dnsmasq-dns-5bb5b457f7-dd6rq\" (UID: \"2c3c9933-3b11-4fc8-a1a2-ef87ba376118\") " pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.299046 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8j2b\" (UniqueName: \"kubernetes.io/projected/9361ecc9-790e-45fc-b84d-7786f69f0929-kube-api-access-x8j2b\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 
15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.299147 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-ovndb-tls-certs\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.299165 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-combined-ca-bundle\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.299188 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-config\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.299208 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-httpd-config\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.303492 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-combined-ca-bundle\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.303509 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-config\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.304266 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-httpd-config\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.305609 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-ovndb-tls-certs\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.316735 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8j2b\" (UniqueName: \"kubernetes.io/projected/9361ecc9-790e-45fc-b84d-7786f69f0929-kube-api-access-x8j2b\") pod \"neutron-7dc5fcd74d-zllrm\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") " pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.375037 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.475551 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:02 crc kubenswrapper[4757]: I1006 15:11:02.934304 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bb5b457f7-dd6rq"] Oct 06 15:11:03 crc kubenswrapper[4757]: I1006 15:11:03.194860 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7dc5fcd74d-zllrm"] Oct 06 15:11:03 crc kubenswrapper[4757]: W1006 15:11:03.203789 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9361ecc9_790e_45fc_b84d_7786f69f0929.slice/crio-e00f3b18ae62343d7855d7645160ad67a2a558311e55ba08f97fab40f9b54308 WatchSource:0}: Error finding container e00f3b18ae62343d7855d7645160ad67a2a558311e55ba08f97fab40f9b54308: Status 404 returned error can't find the container with id e00f3b18ae62343d7855d7645160ad67a2a558311e55ba08f97fab40f9b54308 Oct 06 15:11:03 crc kubenswrapper[4757]: I1006 15:11:03.918327 4757 generic.go:334] "Generic (PLEG): container finished" podID="2c3c9933-3b11-4fc8-a1a2-ef87ba376118" containerID="3e08841546ae669a34ca24413fe482ff5c97915d79f7f575a91d9c60246dab1c" exitCode=0 Oct 06 15:11:03 crc kubenswrapper[4757]: I1006 15:11:03.918391 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" event={"ID":"2c3c9933-3b11-4fc8-a1a2-ef87ba376118","Type":"ContainerDied","Data":"3e08841546ae669a34ca24413fe482ff5c97915d79f7f575a91d9c60246dab1c"} Oct 06 15:11:03 crc kubenswrapper[4757]: I1006 15:11:03.918418 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" event={"ID":"2c3c9933-3b11-4fc8-a1a2-ef87ba376118","Type":"ContainerStarted","Data":"ca6c98367fc6a2a8b9b5af1316e6e61db46b5f2161a229db870e4236858d19a6"} Oct 06 15:11:03 crc kubenswrapper[4757]: I1006 15:11:03.931370 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dc5fcd74d-zllrm" event={"ID":"9361ecc9-790e-45fc-b84d-7786f69f0929","Type":"ContainerStarted","Data":"3b4791232bdbc27b8a205b666d18ec791a58da126eb3c7ea499c1e77ff06f193"} Oct 06 15:11:03 crc kubenswrapper[4757]: I1006 15:11:03.931415 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dc5fcd74d-zllrm" event={"ID":"9361ecc9-790e-45fc-b84d-7786f69f0929","Type":"ContainerStarted","Data":"2baa9b34488b6b0146e042c643058f10644cadf5d3ff734eb3c8eec99aa9cb9c"} Oct 06 15:11:03 crc kubenswrapper[4757]: I1006 15:11:03.931424 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dc5fcd74d-zllrm" event={"ID":"9361ecc9-790e-45fc-b84d-7786f69f0929","Type":"ContainerStarted","Data":"e00f3b18ae62343d7855d7645160ad67a2a558311e55ba08f97fab40f9b54308"} Oct 06 15:11:03 crc kubenswrapper[4757]: I1006 15:11:03.932185 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7dc5fcd74d-zllrm" Oct 06 15:11:03 crc kubenswrapper[4757]: I1006 15:11:03.992944 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7dc5fcd74d-zllrm" podStartSLOduration=1.99292358 podStartE2EDuration="1.99292358s" podCreationTimestamp="2025-10-06 15:11:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:11:03.968775269 
+0000 UTC m=+5552.466093806" watchObservedRunningTime="2025-10-06 15:11:03.99292358 +0000 UTC m=+5552.490242117" Oct 06 15:11:04 crc kubenswrapper[4757]: I1006 15:11:04.947861 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" event={"ID":"2c3c9933-3b11-4fc8-a1a2-ef87ba376118","Type":"ContainerStarted","Data":"ae72f9305af8280ab1535dc8431da410f779598cb6ad217218a03257ccf9eaa8"} Oct 06 15:11:04 crc kubenswrapper[4757]: I1006 15:11:04.948167 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" Oct 06 15:11:04 crc kubenswrapper[4757]: I1006 15:11:04.969675 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq" podStartSLOduration=2.9696562220000002 podStartE2EDuration="2.969656222s" podCreationTimestamp="2025-10-06 15:11:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:11:04.963365018 +0000 UTC m=+5553.460683575" watchObservedRunningTime="2025-10-06 15:11:04.969656222 +0000 UTC m=+5553.466974759" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.080362 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-597d785b9-4x6ld"] Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.095503 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-597d785b9-4x6ld"] Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.095690 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.104137 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.105322 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.153083 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-public-tls-certs\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.153217 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-httpd-config\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.153395 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-ovndb-tls-certs\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.153424 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-combined-ca-bundle\") pod \"neutron-597d785b9-4x6ld\" (UID: 
\"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.153515 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-config\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.153584 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmphl\" (UniqueName: \"kubernetes.io/projected/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-kube-api-access-jmphl\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.153635 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-internal-tls-certs\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.255481 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-httpd-config\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.255810 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-ovndb-tls-certs\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.255923 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-combined-ca-bundle\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.256051 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-config\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.256205 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmphl\" (UniqueName: \"kubernetes.io/projected/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-kube-api-access-jmphl\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.256317 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-internal-tls-certs\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld" Oct 
06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.256461 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-public-tls-certs\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.263960 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-public-tls-certs\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.264411 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-httpd-config\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.264962 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-internal-tls-certs\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.268607 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-ovndb-tls-certs\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.269862 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-combined-ca-bundle\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.279043 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-config\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.282626 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmphl\" (UniqueName: \"kubernetes.io/projected/90ffb97a-99b4-4c16-b9dc-6295fb5d17df-kube-api-access-jmphl\") pod \"neutron-597d785b9-4x6ld\" (UID: \"90ffb97a-99b4-4c16-b9dc-6295fb5d17df\") " pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.432510 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:05 crc kubenswrapper[4757]: I1006 15:11:05.953862 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-597d785b9-4x6ld"]
Oct 06 15:11:05 crc kubenswrapper[4757]: W1006 15:11:05.962342 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90ffb97a_99b4_4c16_b9dc_6295fb5d17df.slice/crio-e86e189f1bc940bf3ca8699b0134f69eb646e6ab4ab796252b375d7feee87d4e WatchSource:0}: Error finding container e86e189f1bc940bf3ca8699b0134f69eb646e6ab4ab796252b375d7feee87d4e: Status 404 returned error can't find the container with id e86e189f1bc940bf3ca8699b0134f69eb646e6ab4ab796252b375d7feee87d4e
Oct 06 15:11:06 crc kubenswrapper[4757]: I1006 15:11:06.967474 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-597d785b9-4x6ld" event={"ID":"90ffb97a-99b4-4c16-b9dc-6295fb5d17df","Type":"ContainerStarted","Data":"d674774dbc771b49606ea3a0f38a8a0ecb187f6480f29c2b6a7f2db1d8e911dd"}
Oct 06 15:11:06 crc kubenswrapper[4757]: I1006 15:11:06.967872 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-597d785b9-4x6ld" event={"ID":"90ffb97a-99b4-4c16-b9dc-6295fb5d17df","Type":"ContainerStarted","Data":"cd09aeb55bba4cdf1e01756534e916aa616190c379c7a23d6f8072e2f403d320"}
Oct 06 15:11:06 crc kubenswrapper[4757]: I1006 15:11:06.967891 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-597d785b9-4x6ld" event={"ID":"90ffb97a-99b4-4c16-b9dc-6295fb5d17df","Type":"ContainerStarted","Data":"e86e189f1bc940bf3ca8699b0134f69eb646e6ab4ab796252b375d7feee87d4e"}
Oct 06 15:11:06 crc kubenswrapper[4757]: I1006 15:11:06.989040 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-597d785b9-4x6ld" podStartSLOduration=1.989010558 podStartE2EDuration="1.989010558s" podCreationTimestamp="2025-10-06 15:11:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:11:06.988023546 +0000 UTC m=+5555.485342083" watchObservedRunningTime="2025-10-06 15:11:06.989010558 +0000 UTC m=+5555.486329085"
Oct 06 15:11:07 crc kubenswrapper[4757]: I1006 15:11:07.976432 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:12 crc kubenswrapper[4757]: I1006 15:11:12.381344 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bb5b457f7-dd6rq"
Oct 06 15:11:12 crc kubenswrapper[4757]: I1006 15:11:12.449170 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66ddf4b5cc-2n29n"]
Oct 06 15:11:12 crc kubenswrapper[4757]: I1006 15:11:12.449455 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" podUID="5928ff8b-751a-411b-8705-9564341390c3" containerName="dnsmasq-dns" containerID="cri-o://c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231" gracePeriod=10
Oct 06 15:11:12 crc kubenswrapper[4757]: E1006 15:11:12.612322 4757 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5928ff8b_751a_411b_8705_9564341390c3.slice/crio-conmon-c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231.scope\": RecentStats: unable to find data in memory cache]"
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.000381 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n"
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.047438 4757 generic.go:334] "Generic (PLEG): container finished" podID="5928ff8b-751a-411b-8705-9564341390c3" containerID="c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231" exitCode=0
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.047518 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" event={"ID":"5928ff8b-751a-411b-8705-9564341390c3","Type":"ContainerDied","Data":"c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231"}
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.047558 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" event={"ID":"5928ff8b-751a-411b-8705-9564341390c3","Type":"ContainerDied","Data":"e1e17d027b0f05fcf37ae652efb05f16f59acf6d2a1d9404a8bdc262c8eadba9"}
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.047575 4757 scope.go:117] "RemoveContainer" containerID="c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231"
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.048077 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n"
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.071234 4757 scope.go:117] "RemoveContainer" containerID="eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62"
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.097183 4757 scope.go:117] "RemoveContainer" containerID="c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231"
Oct 06 15:11:13 crc kubenswrapper[4757]: E1006 15:11:13.097570 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231\": container with ID starting with c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231 not found: ID does not exist" containerID="c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231"
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.097605 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231"} err="failed to get container status \"c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231\": rpc error: code = NotFound desc = could not find container \"c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231\": container with ID starting with c358be74b36fbd9a57c8796b4f3cff873f073af4fb0200cc0fe7cc97d0276231 not found: ID does not exist"
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.097628 4757 scope.go:117] "RemoveContainer" containerID="eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62"
Oct 06 15:11:13 crc kubenswrapper[4757]: E1006 15:11:13.097941 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62\": container with ID starting with eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62 not found: ID does not exist" containerID="eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62"
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.097967 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62"} err="failed to get container status \"eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62\": rpc error: code = NotFound desc = could not find container \"eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62\": container with ID starting with eb359a8ee3f3d8100fdfe32954ae47e3545479d943f2a6662a76ad3ec9563a62 not found: ID does not exist"
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.100748 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-sb\") pod \"5928ff8b-751a-411b-8705-9564341390c3\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") "
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.100881 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfbzr\" (UniqueName: \"kubernetes.io/projected/5928ff8b-751a-411b-8705-9564341390c3-kube-api-access-wfbzr\") pod \"5928ff8b-751a-411b-8705-9564341390c3\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") "
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.101000 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-dns-svc\") pod \"5928ff8b-751a-411b-8705-9564341390c3\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") "
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.101075 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-nb\") pod \"5928ff8b-751a-411b-8705-9564341390c3\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") "
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.101123 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-config\") pod \"5928ff8b-751a-411b-8705-9564341390c3\" (UID: \"5928ff8b-751a-411b-8705-9564341390c3\") "
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.106735 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5928ff8b-751a-411b-8705-9564341390c3-kube-api-access-wfbzr" (OuterVolumeSpecName: "kube-api-access-wfbzr") pod "5928ff8b-751a-411b-8705-9564341390c3" (UID: "5928ff8b-751a-411b-8705-9564341390c3"). InnerVolumeSpecName "kube-api-access-wfbzr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.141814 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5928ff8b-751a-411b-8705-9564341390c3" (UID: "5928ff8b-751a-411b-8705-9564341390c3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.143167 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-config" (OuterVolumeSpecName: "config") pod "5928ff8b-751a-411b-8705-9564341390c3" (UID: "5928ff8b-751a-411b-8705-9564341390c3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.145999 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5928ff8b-751a-411b-8705-9564341390c3" (UID: "5928ff8b-751a-411b-8705-9564341390c3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.146475 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5928ff8b-751a-411b-8705-9564341390c3" (UID: "5928ff8b-751a-411b-8705-9564341390c3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.203407 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.203460 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfbzr\" (UniqueName: \"kubernetes.io/projected/5928ff8b-751a-411b-8705-9564341390c3-kube-api-access-wfbzr\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.203477 4757 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.203489 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-config\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.203501 4757 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5928ff8b-751a-411b-8705-9564341390c3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.387301 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66ddf4b5cc-2n29n"]
Oct 06 15:11:13 crc kubenswrapper[4757]: I1006 15:11:13.398817 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66ddf4b5cc-2n29n"]
Oct 06 15:11:14 crc kubenswrapper[4757]: I1006 15:11:14.193251 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5928ff8b-751a-411b-8705-9564341390c3" path="/var/lib/kubelet/pods/5928ff8b-751a-411b-8705-9564341390c3/volumes"
Oct 06 15:11:17 crc kubenswrapper[4757]: I1006 15:11:17.905523 4757 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-66ddf4b5cc-2n29n" podUID="5928ff8b-751a-411b-8705-9564341390c3" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.33:5353: i/o timeout"
Oct 06 15:11:32 crc kubenswrapper[4757]: I1006 15:11:32.484934 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7dc5fcd74d-zllrm"
Oct 06 15:11:35 crc kubenswrapper[4757]: I1006 15:11:35.456472 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-597d785b9-4x6ld"
Oct 06 15:11:35 crc kubenswrapper[4757]: I1006 15:11:35.551071 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7dc5fcd74d-zllrm"]
Oct 06 15:11:35 crc kubenswrapper[4757]: I1006 15:11:35.551441 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7dc5fcd74d-zllrm" podUID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerName="neutron-api" containerID="cri-o://2baa9b34488b6b0146e042c643058f10644cadf5d3ff734eb3c8eec99aa9cb9c" gracePeriod=30
Oct 06 15:11:35 crc kubenswrapper[4757]: I1006 15:11:35.552442 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7dc5fcd74d-zllrm" podUID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerName="neutron-httpd" containerID="cri-o://3b4791232bdbc27b8a205b666d18ec791a58da126eb3c7ea499c1e77ff06f193" gracePeriod=30
Oct 06 15:11:36 crc kubenswrapper[4757]: I1006 15:11:36.280294 4757 generic.go:334] "Generic (PLEG): container finished" podID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerID="3b4791232bdbc27b8a205b666d18ec791a58da126eb3c7ea499c1e77ff06f193" exitCode=0
Oct 06 15:11:36 crc kubenswrapper[4757]: I1006 15:11:36.280396 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dc5fcd74d-zllrm" event={"ID":"9361ecc9-790e-45fc-b84d-7786f69f0929","Type":"ContainerDied","Data":"3b4791232bdbc27b8a205b666d18ec791a58da126eb3c7ea499c1e77ff06f193"}
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.309330 4757 generic.go:334] "Generic (PLEG): container finished" podID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerID="2baa9b34488b6b0146e042c643058f10644cadf5d3ff734eb3c8eec99aa9cb9c" exitCode=0
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.309410 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dc5fcd74d-zllrm" event={"ID":"9361ecc9-790e-45fc-b84d-7786f69f0929","Type":"ContainerDied","Data":"2baa9b34488b6b0146e042c643058f10644cadf5d3ff734eb3c8eec99aa9cb9c"}
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.507196 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7dc5fcd74d-zllrm"
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.640802 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-config\") pod \"9361ecc9-790e-45fc-b84d-7786f69f0929\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") "
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.640914 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-httpd-config\") pod \"9361ecc9-790e-45fc-b84d-7786f69f0929\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") "
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.641172 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-combined-ca-bundle\") pod \"9361ecc9-790e-45fc-b84d-7786f69f0929\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") "
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.641214 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8j2b\" (UniqueName: \"kubernetes.io/projected/9361ecc9-790e-45fc-b84d-7786f69f0929-kube-api-access-x8j2b\") pod \"9361ecc9-790e-45fc-b84d-7786f69f0929\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") "
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.641271 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-ovndb-tls-certs\") pod \"9361ecc9-790e-45fc-b84d-7786f69f0929\" (UID: \"9361ecc9-790e-45fc-b84d-7786f69f0929\") "
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.646534 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9361ecc9-790e-45fc-b84d-7786f69f0929-kube-api-access-x8j2b" (OuterVolumeSpecName: "kube-api-access-x8j2b") pod "9361ecc9-790e-45fc-b84d-7786f69f0929" (UID: "9361ecc9-790e-45fc-b84d-7786f69f0929"). InnerVolumeSpecName "kube-api-access-x8j2b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.653967 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "9361ecc9-790e-45fc-b84d-7786f69f0929" (UID: "9361ecc9-790e-45fc-b84d-7786f69f0929"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.700584 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-config" (OuterVolumeSpecName: "config") pod "9361ecc9-790e-45fc-b84d-7786f69f0929" (UID: "9361ecc9-790e-45fc-b84d-7786f69f0929"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.724222 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9361ecc9-790e-45fc-b84d-7786f69f0929" (UID: "9361ecc9-790e-45fc-b84d-7786f69f0929"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.725921 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "9361ecc9-790e-45fc-b84d-7786f69f0929" (UID: "9361ecc9-790e-45fc-b84d-7786f69f0929"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.743178 4757 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.743209 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8j2b\" (UniqueName: \"kubernetes.io/projected/9361ecc9-790e-45fc-b84d-7786f69f0929-kube-api-access-x8j2b\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.743219 4757 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.743228 4757 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-config\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:39 crc kubenswrapper[4757]: I1006 15:11:39.743238 4757 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9361ecc9-790e-45fc-b84d-7786f69f0929-httpd-config\") on node \"crc\" DevicePath \"\""
Oct 06 15:11:40 crc kubenswrapper[4757]: I1006 15:11:40.325351 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dc5fcd74d-zllrm" event={"ID":"9361ecc9-790e-45fc-b84d-7786f69f0929","Type":"ContainerDied","Data":"e00f3b18ae62343d7855d7645160ad67a2a558311e55ba08f97fab40f9b54308"}
Oct 06 15:11:40 crc kubenswrapper[4757]: I1006 15:11:40.325420 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7dc5fcd74d-zllrm"
Oct 06 15:11:40 crc kubenswrapper[4757]: I1006 15:11:40.325438 4757 scope.go:117] "RemoveContainer" containerID="3b4791232bdbc27b8a205b666d18ec791a58da126eb3c7ea499c1e77ff06f193"
Oct 06 15:11:40 crc kubenswrapper[4757]: I1006 15:11:40.355261 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7dc5fcd74d-zllrm"]
Oct 06 15:11:40 crc kubenswrapper[4757]: I1006 15:11:40.362577 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7dc5fcd74d-zllrm"]
Oct 06 15:11:40 crc kubenswrapper[4757]: I1006 15:11:40.374963 4757 scope.go:117] "RemoveContainer" containerID="2baa9b34488b6b0146e042c643058f10644cadf5d3ff734eb3c8eec99aa9cb9c"
Oct 06 15:11:42 crc kubenswrapper[4757]: I1006 15:11:42.194336 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9361ecc9-790e-45fc-b84d-7786f69f0929" path="/var/lib/kubelet/pods/9361ecc9-790e-45fc-b84d-7786f69f0929/volumes"
Oct 06 15:12:04 crc kubenswrapper[4757]: I1006 15:12:04.361637 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 06 15:12:04 crc kubenswrapper[4757]: I1006 15:12:04.362633 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 06 15:12:34 crc kubenswrapper[4757]: I1006 15:12:34.361661 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 06 15:12:34 crc kubenswrapper[4757]: I1006 15:12:34.362417 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 06 15:12:38 crc kubenswrapper[4757]: I1006 15:12:38.244746 4757 scope.go:117] "RemoveContainer" containerID="5138545c07de1ffa5eefbd9d4ac4a7e039ade0fc93eda1f5ae1445771726047a"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.844286 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25nfh/must-gather-2vkcl"]
Oct 06 15:12:52 crc kubenswrapper[4757]: E1006 15:12:52.845236 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerName="neutron-api"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.845250 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerName="neutron-api"
Oct 06 15:12:52 crc kubenswrapper[4757]: E1006 15:12:52.845276 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerName="neutron-httpd"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.845283 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerName="neutron-httpd"
Oct 06 15:12:52 crc kubenswrapper[4757]: E1006 15:12:52.845301 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5928ff8b-751a-411b-8705-9564341390c3" containerName="init"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.845309 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5928ff8b-751a-411b-8705-9564341390c3" containerName="init"
Oct 06 15:12:52 crc kubenswrapper[4757]: E1006 15:12:52.845320 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5928ff8b-751a-411b-8705-9564341390c3" containerName="dnsmasq-dns"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.845326 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="5928ff8b-751a-411b-8705-9564341390c3" containerName="dnsmasq-dns"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.845527 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="5928ff8b-751a-411b-8705-9564341390c3" containerName="dnsmasq-dns"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.845543 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerName="neutron-api"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.845560 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="9361ecc9-790e-45fc-b84d-7786f69f0929" containerName="neutron-httpd"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.846574 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25nfh/must-gather-2vkcl"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.851034 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-25nfh"/"default-dockercfg-ghsdm"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.851113 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-25nfh"/"openshift-service-ca.crt"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.852883 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-25nfh"/"kube-root-ca.crt"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.860208 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-25nfh/must-gather-2vkcl"]
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.944648 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bdf271a6-49e6-4532-90f2-ccf8971a0431-must-gather-output\") pod \"must-gather-2vkcl\" (UID: \"bdf271a6-49e6-4532-90f2-ccf8971a0431\") " pod="openshift-must-gather-25nfh/must-gather-2vkcl"
Oct 06 15:12:52 crc kubenswrapper[4757]: I1006 15:12:52.944938 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7hfw\" (UniqueName: \"kubernetes.io/projected/bdf271a6-49e6-4532-90f2-ccf8971a0431-kube-api-access-b7hfw\") pod \"must-gather-2vkcl\" (UID: \"bdf271a6-49e6-4532-90f2-ccf8971a0431\") " pod="openshift-must-gather-25nfh/must-gather-2vkcl"
Oct 06 15:12:53 crc kubenswrapper[4757]: I1006 15:12:53.046719 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bdf271a6-49e6-4532-90f2-ccf8971a0431-must-gather-output\") pod \"must-gather-2vkcl\" (UID: \"bdf271a6-49e6-4532-90f2-ccf8971a0431\") " pod="openshift-must-gather-25nfh/must-gather-2vkcl"
Oct 06 15:12:53 crc kubenswrapper[4757]: I1006 15:12:53.047220 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7hfw\" (UniqueName: \"kubernetes.io/projected/bdf271a6-49e6-4532-90f2-ccf8971a0431-kube-api-access-b7hfw\") pod \"must-gather-2vkcl\" (UID: \"bdf271a6-49e6-4532-90f2-ccf8971a0431\") " pod="openshift-must-gather-25nfh/must-gather-2vkcl"
Oct 06 15:12:53 crc kubenswrapper[4757]: I1006 15:12:53.047670 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bdf271a6-49e6-4532-90f2-ccf8971a0431-must-gather-output\") pod \"must-gather-2vkcl\" (UID: \"bdf271a6-49e6-4532-90f2-ccf8971a0431\") " pod="openshift-must-gather-25nfh/must-gather-2vkcl"
Oct 06 15:12:53 crc kubenswrapper[4757]: I1006 15:12:53.072038 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7hfw\" (UniqueName: \"kubernetes.io/projected/bdf271a6-49e6-4532-90f2-ccf8971a0431-kube-api-access-b7hfw\") pod \"must-gather-2vkcl\" (UID: \"bdf271a6-49e6-4532-90f2-ccf8971a0431\") " pod="openshift-must-gather-25nfh/must-gather-2vkcl"
Oct 06 15:12:53 crc kubenswrapper[4757]: I1006 15:12:53.167183 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25nfh/must-gather-2vkcl"
Oct 06 15:12:53 crc kubenswrapper[4757]: I1006 15:12:53.466950 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-25nfh/must-gather-2vkcl"]
Oct 06 15:12:54 crc kubenswrapper[4757]: I1006 15:12:54.073389 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/must-gather-2vkcl" event={"ID":"bdf271a6-49e6-4532-90f2-ccf8971a0431","Type":"ContainerStarted","Data":"f0640646c4657f4553b757f8aa142b1c874e96d68a94a1d571303a33349c6df6"}
Oct 06 15:12:58 crc kubenswrapper[4757]: I1006 15:12:58.121969 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/must-gather-2vkcl" event={"ID":"bdf271a6-49e6-4532-90f2-ccf8971a0431","Type":"ContainerStarted","Data":"98f2d3b4ac279bf321c96ddeafb44be6de65238f5fe2cdc773fd523fadbb24d7"}
Oct 06 15:12:58 crc kubenswrapper[4757]: I1006 15:12:58.122752 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/must-gather-2vkcl" event={"ID":"bdf271a6-49e6-4532-90f2-ccf8971a0431","Type":"ContainerStarted","Data":"dbb9853c9306728a93f88448a5352b7cf00c08eb20575a4dcd489cf02379f664"}
Oct 06 15:12:58 crc kubenswrapper[4757]: I1006 15:12:58.146000 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-25nfh/must-gather-2vkcl" podStartSLOduration=2.194514027 podStartE2EDuration="6.14598189s" podCreationTimestamp="2025-10-06 15:12:52 +0000 UTC" firstStartedPulling="2025-10-06 15:12:53.47993635 +0000 UTC m=+5661.977254887" lastFinishedPulling="2025-10-06 15:12:57.431404213 +0000 UTC m=+5665.928722750" observedRunningTime="2025-10-06 15:12:58.140870216 +0000 UTC m=+5666.638188753" watchObservedRunningTime="2025-10-06 15:12:58.14598189 +0000 UTC m=+5666.643300427"
Oct 06 15:13:00 crc kubenswrapper[4757]: I1006 15:13:00.458324 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25nfh/crc-debug-5b25j"]
Oct 06 15:13:00 crc kubenswrapper[4757]: I1006 15:13:00.460147 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-5b25j"
Oct 06 15:13:00 crc kubenswrapper[4757]: I1006 15:13:00.597598 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f708089-ac28-43ea-acdf-0e97f359af6d-host\") pod \"crc-debug-5b25j\" (UID: \"7f708089-ac28-43ea-acdf-0e97f359af6d\") " pod="openshift-must-gather-25nfh/crc-debug-5b25j"
Oct 06 15:13:00 crc kubenswrapper[4757]: I1006 15:13:00.598156 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdpn4\" (UniqueName: \"kubernetes.io/projected/7f708089-ac28-43ea-acdf-0e97f359af6d-kube-api-access-bdpn4\") pod \"crc-debug-5b25j\" (UID: \"7f708089-ac28-43ea-acdf-0e97f359af6d\") " pod="openshift-must-gather-25nfh/crc-debug-5b25j"
Oct 06 15:13:00 crc kubenswrapper[4757]: I1006 15:13:00.700444 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f708089-ac28-43ea-acdf-0e97f359af6d-host\") pod \"crc-debug-5b25j\" (UID: \"7f708089-ac28-43ea-acdf-0e97f359af6d\") " pod="openshift-must-gather-25nfh/crc-debug-5b25j"
Oct 06 15:13:00 crc kubenswrapper[4757]: I1006 15:13:00.700749 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdpn4\" (UniqueName: \"kubernetes.io/projected/7f708089-ac28-43ea-acdf-0e97f359af6d-kube-api-access-bdpn4\") pod \"crc-debug-5b25j\" (UID: \"7f708089-ac28-43ea-acdf-0e97f359af6d\") " pod="openshift-must-gather-25nfh/crc-debug-5b25j"
Oct 06 15:13:00 crc kubenswrapper[4757]: I1006 15:13:00.700585 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f708089-ac28-43ea-acdf-0e97f359af6d-host\") pod \"crc-debug-5b25j\" (UID: \"7f708089-ac28-43ea-acdf-0e97f359af6d\") " pod="openshift-must-gather-25nfh/crc-debug-5b25j"
Oct 06 15:13:00 crc kubenswrapper[4757]: I1006 15:13:00.724224 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdpn4\" (UniqueName: \"kubernetes.io/projected/7f708089-ac28-43ea-acdf-0e97f359af6d-kube-api-access-bdpn4\") pod \"crc-debug-5b25j\" (UID: \"7f708089-ac28-43ea-acdf-0e97f359af6d\") " pod="openshift-must-gather-25nfh/crc-debug-5b25j"
Oct 06 15:13:00 crc kubenswrapper[4757]: I1006 15:13:00.781799 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-5b25j"
Oct 06 15:13:00 crc kubenswrapper[4757]: W1006 15:13:00.807897 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f708089_ac28_43ea_acdf_0e97f359af6d.slice/crio-9d7f5644e195964cbfd419de5d7922607a98cb1e342c29bb671735300fba4674 WatchSource:0}: Error finding container 9d7f5644e195964cbfd419de5d7922607a98cb1e342c29bb671735300fba4674: Status 404 returned error can't find the container with id 9d7f5644e195964cbfd419de5d7922607a98cb1e342c29bb671735300fba4674
Oct 06 15:13:01 crc kubenswrapper[4757]: I1006 15:13:01.146280 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/crc-debug-5b25j" event={"ID":"7f708089-ac28-43ea-acdf-0e97f359af6d","Type":"ContainerStarted","Data":"9d7f5644e195964cbfd419de5d7922607a98cb1e342c29bb671735300fba4674"}
Oct 06 15:13:04 crc kubenswrapper[4757]: I1006 15:13:04.361476 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 06 15:13:04 crc kubenswrapper[4757]: I1006 15:13:04.362010 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 06 15:13:04 crc kubenswrapper[4757]: I1006 15:13:04.362064 4757 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h"
Oct 06 15:13:04 crc kubenswrapper[4757]: I1006 15:13:04.362848 4757 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"} pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 06 15:13:04 crc kubenswrapper[4757]: I1006 15:13:04.362901 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" containerID="cri-o://c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f" gracePeriod=600
Oct 06 15:13:04 crc kubenswrapper[4757]: E1006 15:13:04.483729 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:13:05 crc kubenswrapper[4757]: I1006 15:13:05.197228 4757 generic.go:334] "Generic (PLEG): container finished" podID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f" exitCode=0
Oct 06 15:13:05 crc kubenswrapper[4757]: I1006 15:13:05.197597 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerDied","Data":"c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"}
Oct 06 15:13:05 crc kubenswrapper[4757]: I1006 15:13:05.197636 4757 scope.go:117] "RemoveContainer" containerID="6ada3739de20b08be50b8e188471c034ef69b5ce1559383ec174465e8517cd1b"
Oct 06 15:13:05 crc kubenswrapper[4757]: I1006 15:13:05.198299 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:13:05 crc kubenswrapper[4757]: E1006 15:13:05.198568 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:13:13 crc kubenswrapper[4757]: I1006 15:13:13.283610 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/crc-debug-5b25j" event={"ID":"7f708089-ac28-43ea-acdf-0e97f359af6d","Type":"ContainerStarted","Data":"f27b6e7ee1ae9de3c0de1681575597e783fe8b25fb0e506e3089c0f4c2f33e8c"}
Oct 06 15:13:13 crc kubenswrapper[4757]: I1006 15:13:13.299435 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-25nfh/crc-debug-5b25j" podStartSLOduration=1.876391816 podStartE2EDuration="13.299409686s" podCreationTimestamp="2025-10-06 15:13:00 +0000 UTC" firstStartedPulling="2025-10-06 15:13:00.810175373 +0000 UTC m=+5669.307493910" lastFinishedPulling="2025-10-06 15:13:12.233193233 +0000 UTC m=+5680.730511780" observedRunningTime="2025-10-06 15:13:13.297012019 +0000 UTC m=+5681.794330556" watchObservedRunningTime="2025-10-06 15:13:13.299409686 +0000 UTC m=+5681.796728223"
Oct 06 15:13:20 crc kubenswrapper[4757]: I1006 15:13:20.180456 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:13:20 crc kubenswrapper[4757]: E1006 15:13:20.181360 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:13:34 crc kubenswrapper[4757]: I1006 15:13:34.180853 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:13:34 crc kubenswrapper[4757]: E1006 15:13:34.181981 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:13:38 crc kubenswrapper[4757]: I1006 15:13:38.329198 4757 scope.go:117] "RemoveContainer" containerID="2ffc14a7f5c29f6aea5c4d0a5ed621ca9d88bc4bb638278c4c9c5ca93a0da6d3"
Oct 06 15:13:38 crc kubenswrapper[4757]: I1006 15:13:38.360278 4757 scope.go:117] "RemoveContainer" containerID="7c9c8e4f6885c56ff2d317a82c7d5d2207ad6215bcc0660abf6517bdf99124d2"
Oct 06 15:13:38 crc kubenswrapper[4757]: I1006 15:13:38.434147 4757 scope.go:117] "RemoveContainer" containerID="d2d295724c333bdd2fc3e8a7f7b3afafb384048883cb1085c44fa09082c0c7cc"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.331602 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6ggg2"]
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.340151 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.370806 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6ggg2"]
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.461336 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5n2d\" (UniqueName: \"kubernetes.io/projected/f83472a5-99a9-4042-a028-dd94de7e708f-kube-api-access-c5n2d\") pod \"redhat-marketplace-6ggg2\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") " pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.461572 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-utilities\") pod \"redhat-marketplace-6ggg2\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") " pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.461599 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-catalog-content\") pod \"redhat-marketplace-6ggg2\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") " pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.562855 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-utilities\") pod \"redhat-marketplace-6ggg2\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") " pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.562904 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-catalog-content\") pod \"redhat-marketplace-6ggg2\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") " pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.562943 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5n2d\" (UniqueName: \"kubernetes.io/projected/f83472a5-99a9-4042-a028-dd94de7e708f-kube-api-access-c5n2d\") pod \"redhat-marketplace-6ggg2\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") " pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.563508 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-utilities\") pod \"redhat-marketplace-6ggg2\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") " pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.563543 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-catalog-content\") pod \"redhat-marketplace-6ggg2\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") " pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.584475 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5n2d\" (UniqueName: \"kubernetes.io/projected/f83472a5-99a9-4042-a028-dd94de7e708f-kube-api-access-c5n2d\") pod \"redhat-marketplace-6ggg2\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") " pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:43 crc kubenswrapper[4757]: I1006 15:13:43.682140 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:44 crc kubenswrapper[4757]: I1006 15:13:44.125121 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6ggg2"]
Oct 06 15:13:44 crc kubenswrapper[4757]: I1006 15:13:44.531432 4757 generic.go:334] "Generic (PLEG): container finished" podID="f83472a5-99a9-4042-a028-dd94de7e708f" containerID="71395402cbaca279dc70fd1a26c6739d3dab3f8c0d99a600c8bcb4b136c877aa" exitCode=0
Oct 06 15:13:44 crc kubenswrapper[4757]: I1006 15:13:44.531548 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ggg2" event={"ID":"f83472a5-99a9-4042-a028-dd94de7e708f","Type":"ContainerDied","Data":"71395402cbaca279dc70fd1a26c6739d3dab3f8c0d99a600c8bcb4b136c877aa"}
Oct 06 15:13:44 crc kubenswrapper[4757]: I1006 15:13:44.532017 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ggg2" event={"ID":"f83472a5-99a9-4042-a028-dd94de7e708f","Type":"ContainerStarted","Data":"3cbb0537835e7fa4d518c263c3bfbc5bb024a848bef2581e2ca7ec425aaed44c"}
Oct 06 15:13:45 crc kubenswrapper[4757]: I1006 15:13:45.544182 4757 generic.go:334] "Generic (PLEG): container finished" podID="f83472a5-99a9-4042-a028-dd94de7e708f" containerID="38dbbba58e2119168352162b242bf05f7183734f2054a01d345e86818e07cadd" exitCode=0
Oct 06 15:13:45 crc kubenswrapper[4757]: I1006 15:13:45.544321 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ggg2" event={"ID":"f83472a5-99a9-4042-a028-dd94de7e708f","Type":"ContainerDied","Data":"38dbbba58e2119168352162b242bf05f7183734f2054a01d345e86818e07cadd"}
Oct 06 15:13:46 crc kubenswrapper[4757]: I1006 15:13:46.555262 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ggg2" event={"ID":"f83472a5-99a9-4042-a028-dd94de7e708f","Type":"ContainerStarted","Data":"97a668331912085fb247d93c181f25975617e688a5db2d02e6abbf946f00ca50"}
Oct 06 15:13:46 crc kubenswrapper[4757]: I1006 15:13:46.580041 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6ggg2" podStartSLOduration=2.171164306 podStartE2EDuration="3.580023786s" podCreationTimestamp="2025-10-06 15:13:43 +0000 UTC" firstStartedPulling="2025-10-06 15:13:44.537450601 +0000 UTC m=+5713.034769138" lastFinishedPulling="2025-10-06 15:13:45.946310081 +0000 UTC m=+5714.443628618" observedRunningTime="2025-10-06 15:13:46.578774097 +0000 UTC m=+5715.076092654" watchObservedRunningTime="2025-10-06 15:13:46.580023786 +0000 UTC m=+5715.077342343"
Oct 06 15:13:48 crc kubenswrapper[4757]: I1006 15:13:48.182568 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:13:48 crc kubenswrapper[4757]: E1006 15:13:48.182916 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:13:53 crc kubenswrapper[4757]: I1006 15:13:53.682604 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:53 crc kubenswrapper[4757]: I1006 15:13:53.683134 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:53 crc kubenswrapper[4757]: I1006 15:13:53.727813 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:54 crc kubenswrapper[4757]: I1006 15:13:54.770599 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:54 crc kubenswrapper[4757]: I1006 15:13:54.833418 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6ggg2"]
Oct 06 15:13:56 crc kubenswrapper[4757]: I1006 15:13:56.648194 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-6ggg2" podUID="f83472a5-99a9-4042-a028-dd94de7e708f" containerName="registry-server" containerID="cri-o://97a668331912085fb247d93c181f25975617e688a5db2d02e6abbf946f00ca50" gracePeriod=2
Oct 06 15:13:57 crc kubenswrapper[4757]: I1006 15:13:57.662865 4757 generic.go:334] "Generic (PLEG): container finished" podID="f83472a5-99a9-4042-a028-dd94de7e708f" containerID="97a668331912085fb247d93c181f25975617e688a5db2d02e6abbf946f00ca50" exitCode=0
Oct 06 15:13:57 crc kubenswrapper[4757]: I1006 15:13:57.662990 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ggg2" event={"ID":"f83472a5-99a9-4042-a028-dd94de7e708f","Type":"ContainerDied","Data":"97a668331912085fb247d93c181f25975617e688a5db2d02e6abbf946f00ca50"}
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.233676 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.330561 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-utilities\") pod \"f83472a5-99a9-4042-a028-dd94de7e708f\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") "
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.330842 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-catalog-content\") pod \"f83472a5-99a9-4042-a028-dd94de7e708f\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") "
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.330891 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5n2d\" (UniqueName: \"kubernetes.io/projected/f83472a5-99a9-4042-a028-dd94de7e708f-kube-api-access-c5n2d\") pod \"f83472a5-99a9-4042-a028-dd94de7e708f\" (UID: \"f83472a5-99a9-4042-a028-dd94de7e708f\") "
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.331639 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-utilities" (OuterVolumeSpecName: "utilities") pod "f83472a5-99a9-4042-a028-dd94de7e708f" (UID: "f83472a5-99a9-4042-a028-dd94de7e708f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.336126 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f83472a5-99a9-4042-a028-dd94de7e708f-kube-api-access-c5n2d" (OuterVolumeSpecName: "kube-api-access-c5n2d") pod "f83472a5-99a9-4042-a028-dd94de7e708f" (UID: "f83472a5-99a9-4042-a028-dd94de7e708f"). InnerVolumeSpecName "kube-api-access-c5n2d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.344688 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f83472a5-99a9-4042-a028-dd94de7e708f" (UID: "f83472a5-99a9-4042-a028-dd94de7e708f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.432432 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-utilities\") on node \"crc\" DevicePath \"\""
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.432475 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83472a5-99a9-4042-a028-dd94de7e708f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.432491 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5n2d\" (UniqueName: \"kubernetes.io/projected/f83472a5-99a9-4042-a028-dd94de7e708f-kube-api-access-c5n2d\") on node \"crc\" DevicePath \"\""
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.675909 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ggg2" event={"ID":"f83472a5-99a9-4042-a028-dd94de7e708f","Type":"ContainerDied","Data":"3cbb0537835e7fa4d518c263c3bfbc5bb024a848bef2581e2ca7ec425aaed44c"}
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.675957 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6ggg2"
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.675963 4757 scope.go:117] "RemoveContainer" containerID="97a668331912085fb247d93c181f25975617e688a5db2d02e6abbf946f00ca50"
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.706009 4757 scope.go:117] "RemoveContainer" containerID="38dbbba58e2119168352162b242bf05f7183734f2054a01d345e86818e07cadd"
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.712980 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-6ggg2"]
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.725534 4757 scope.go:117] "RemoveContainer" containerID="71395402cbaca279dc70fd1a26c6739d3dab3f8c0d99a600c8bcb4b136c877aa"
Oct 06 15:13:58 crc kubenswrapper[4757]: I1006 15:13:58.741180 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-6ggg2"]
Oct 06 15:14:00 crc kubenswrapper[4757]: I1006 15:14:00.191287 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f83472a5-99a9-4042-a028-dd94de7e708f" path="/var/lib/kubelet/pods/f83472a5-99a9-4042-a028-dd94de7e708f/volumes"
Oct 06 15:14:03 crc kubenswrapper[4757]: I1006 15:14:03.180282 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:14:03 crc kubenswrapper[4757]: E1006 15:14:03.181916 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:14:18 crc kubenswrapper[4757]: I1006 15:14:18.184372 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:14:18 crc kubenswrapper[4757]: E1006 15:14:18.185409 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:14:29 crc kubenswrapper[4757]: I1006 15:14:29.181007 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:14:29 crc kubenswrapper[4757]: E1006 15:14:29.182407 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.853179 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jqk5s"]
Oct 06 15:14:37 crc kubenswrapper[4757]: E1006 15:14:37.855931 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f83472a5-99a9-4042-a028-dd94de7e708f" containerName="extract-utilities"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.856078 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f83472a5-99a9-4042-a028-dd94de7e708f" containerName="extract-utilities"
Oct 06 15:14:37 crc kubenswrapper[4757]: E1006 15:14:37.856262 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f83472a5-99a9-4042-a028-dd94de7e708f" containerName="registry-server"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.856382 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f83472a5-99a9-4042-a028-dd94de7e708f" containerName="registry-server"
Oct 06 15:14:37 crc kubenswrapper[4757]: E1006 15:14:37.856531 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f83472a5-99a9-4042-a028-dd94de7e708f" containerName="extract-content"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.861484 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="f83472a5-99a9-4042-a028-dd94de7e708f" containerName="extract-content"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.862829 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="f83472a5-99a9-4042-a028-dd94de7e708f" containerName="registry-server"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.865510 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.877767 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jqk5s"]
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.895482 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-utilities\") pod \"redhat-operators-jqk5s\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.895564 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnjrq\" (UniqueName: \"kubernetes.io/projected/7db35c26-1b14-48ff-bf11-1ad09f811703-kube-api-access-gnjrq\") pod \"redhat-operators-jqk5s\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.895630 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-catalog-content\") pod \"redhat-operators-jqk5s\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.997841 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-catalog-content\") pod \"redhat-operators-jqk5s\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.998003 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-utilities\") pod \"redhat-operators-jqk5s\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.998112 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnjrq\" (UniqueName: \"kubernetes.io/projected/7db35c26-1b14-48ff-bf11-1ad09f811703-kube-api-access-gnjrq\") pod \"redhat-operators-jqk5s\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.998525 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-catalog-content\") pod \"redhat-operators-jqk5s\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:37 crc kubenswrapper[4757]: I1006 15:14:37.998711 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-utilities\") pod \"redhat-operators-jqk5s\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:38 crc kubenswrapper[4757]: I1006 15:14:38.020729 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnjrq\" (UniqueName: \"kubernetes.io/projected/7db35c26-1b14-48ff-bf11-1ad09f811703-kube-api-access-gnjrq\") pod \"redhat-operators-jqk5s\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:38 crc kubenswrapper[4757]: I1006 15:14:38.227114 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jqk5s"
Oct 06 15:14:38 crc kubenswrapper[4757]: I1006 15:14:38.701837 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jqk5s"]
Oct 06 15:14:39 crc kubenswrapper[4757]: I1006 15:14:39.060583 4757 generic.go:334] "Generic (PLEG): container finished" podID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerID="1350c93e519b10d1d175efc839d3aabf039e56a1df396bf658e1ed35bafcc5f4" exitCode=0
Oct 06 15:14:39 crc kubenswrapper[4757]: I1006 15:14:39.060629 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqk5s" event={"ID":"7db35c26-1b14-48ff-bf11-1ad09f811703","Type":"ContainerDied","Data":"1350c93e519b10d1d175efc839d3aabf039e56a1df396bf658e1ed35bafcc5f4"}
Oct 06 15:14:39 crc kubenswrapper[4757]: I1006 15:14:39.060855 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqk5s" event={"ID":"7db35c26-1b14-48ff-bf11-1ad09f811703","Type":"ContainerStarted","Data":"a645e7facab7a79f628a48d1f00bd4263a58aa3d9d1f37bfc91cc53e62a69fe9"}
Oct 06 15:14:39 crc kubenswrapper[4757]: I1006 15:14:39.284898 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 06 15:14:40 crc kubenswrapper[4757]: I1006 15:14:40.073160 4757 generic.go:334] "Generic (PLEG): container finished" podID="7f708089-ac28-43ea-acdf-0e97f359af6d" containerID="f27b6e7ee1ae9de3c0de1681575597e783fe8b25fb0e506e3089c0f4c2f33e8c" exitCode=0
Oct 06 15:14:40 crc kubenswrapper[4757]: I1006 15:14:40.073261 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/crc-debug-5b25j" event={"ID":"7f708089-ac28-43ea-acdf-0e97f359af6d","Type":"ContainerDied","Data":"f27b6e7ee1ae9de3c0de1681575597e783fe8b25fb0e506e3089c0f4c2f33e8c"}
Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.088253 4757 generic.go:334] "Generic (PLEG): container finished" podID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerID="48e0d7cc8769182eba47802ecb4e0e6151d740fc29d3f5a391507f82a23be307" exitCode=0
Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.088351 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqk5s" event={"ID":"7db35c26-1b14-48ff-bf11-1ad09f811703","Type":"ContainerDied","Data":"48e0d7cc8769182eba47802ecb4e0e6151d740fc29d3f5a391507f82a23be307"}
Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.182670 4757 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-5b25j" Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.221125 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25nfh/crc-debug-5b25j"] Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.229375 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25nfh/crc-debug-5b25j"] Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.361371 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdpn4\" (UniqueName: \"kubernetes.io/projected/7f708089-ac28-43ea-acdf-0e97f359af6d-kube-api-access-bdpn4\") pod \"7f708089-ac28-43ea-acdf-0e97f359af6d\" (UID: \"7f708089-ac28-43ea-acdf-0e97f359af6d\") " Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.361539 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f708089-ac28-43ea-acdf-0e97f359af6d-host\") pod \"7f708089-ac28-43ea-acdf-0e97f359af6d\" (UID: \"7f708089-ac28-43ea-acdf-0e97f359af6d\") " Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.361578 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f708089-ac28-43ea-acdf-0e97f359af6d-host" (OuterVolumeSpecName: "host") pod "7f708089-ac28-43ea-acdf-0e97f359af6d" (UID: "7f708089-ac28-43ea-acdf-0e97f359af6d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.361994 4757 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7f708089-ac28-43ea-acdf-0e97f359af6d-host\") on node \"crc\" DevicePath \"\"" Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.367491 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f708089-ac28-43ea-acdf-0e97f359af6d-kube-api-access-bdpn4" (OuterVolumeSpecName: "kube-api-access-bdpn4") pod "7f708089-ac28-43ea-acdf-0e97f359af6d" (UID: "7f708089-ac28-43ea-acdf-0e97f359af6d"). InnerVolumeSpecName "kube-api-access-bdpn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:14:41 crc kubenswrapper[4757]: I1006 15:14:41.463570 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdpn4\" (UniqueName: \"kubernetes.io/projected/7f708089-ac28-43ea-acdf-0e97f359af6d-kube-api-access-bdpn4\") on node \"crc\" DevicePath \"\"" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.102881 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d7f5644e195964cbfd419de5d7922607a98cb1e342c29bb671735300fba4674" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.102948 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-5b25j" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.106233 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqk5s" event={"ID":"7db35c26-1b14-48ff-bf11-1ad09f811703","Type":"ContainerStarted","Data":"88c912771f55e3b73a3bd9e7b354dd76bf8fa7ce4da0f3b10db9434c1fb18d79"} Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.198354 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f708089-ac28-43ea-acdf-0e97f359af6d" path="/var/lib/kubelet/pods/7f708089-ac28-43ea-acdf-0e97f359af6d/volumes" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.448262 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jqk5s" podStartSLOduration=3.054668547 podStartE2EDuration="5.448245819s" podCreationTimestamp="2025-10-06 15:14:37 +0000 UTC" firstStartedPulling="2025-10-06 15:14:39.284584546 +0000 UTC m=+5767.781903083" lastFinishedPulling="2025-10-06 15:14:41.678161818 +0000 UTC m=+5770.175480355" observedRunningTime="2025-10-06 15:14:42.136553668 +0000 UTC m=+5770.633872245" watchObservedRunningTime="2025-10-06 15:14:42.448245819 +0000 UTC m=+5770.945564356" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.451398 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25nfh/crc-debug-6l46n"] Oct 06 15:14:42 crc kubenswrapper[4757]: E1006 15:14:42.451736 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f708089-ac28-43ea-acdf-0e97f359af6d" containerName="container-00" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.451752 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f708089-ac28-43ea-acdf-0e97f359af6d" containerName="container-00" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.451898 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f708089-ac28-43ea-acdf-0e97f359af6d" containerName="container-00" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.452465 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.585737 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/546626fe-53f0-4c34-8fb5-a5d42af96231-host\") pod \"crc-debug-6l46n\" (UID: \"546626fe-53f0-4c34-8fb5-a5d42af96231\") " pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.586134 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28c98\" (UniqueName: \"kubernetes.io/projected/546626fe-53f0-4c34-8fb5-a5d42af96231-kube-api-access-28c98\") pod \"crc-debug-6l46n\" (UID: \"546626fe-53f0-4c34-8fb5-a5d42af96231\") " pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.688296 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/546626fe-53f0-4c34-8fb5-a5d42af96231-host\") pod \"crc-debug-6l46n\" (UID: \"546626fe-53f0-4c34-8fb5-a5d42af96231\") " pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.688452 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28c98\" (UniqueName: \"kubernetes.io/projected/546626fe-53f0-4c34-8fb5-a5d42af96231-kube-api-access-28c98\") pod \"crc-debug-6l46n\" (UID: \"546626fe-53f0-4c34-8fb5-a5d42af96231\") " pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.688493 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/546626fe-53f0-4c34-8fb5-a5d42af96231-host\") pod \"crc-debug-6l46n\" (UID: \"546626fe-53f0-4c34-8fb5-a5d42af96231\") " pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.707793 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28c98\" (UniqueName: \"kubernetes.io/projected/546626fe-53f0-4c34-8fb5-a5d42af96231-kube-api-access-28c98\") pod \"crc-debug-6l46n\" (UID: \"546626fe-53f0-4c34-8fb5-a5d42af96231\") " pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:42 crc kubenswrapper[4757]: I1006 15:14:42.780817 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:42 crc kubenswrapper[4757]: W1006 15:14:42.810068 4757 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod546626fe_53f0_4c34_8fb5_a5d42af96231.slice/crio-c19930e37fb1eb43da4f5eef7b60a364be1affe7f0161c612fcaabafb66ccaf5 WatchSource:0}: Error finding container c19930e37fb1eb43da4f5eef7b60a364be1affe7f0161c612fcaabafb66ccaf5: Status 404 returned error can't find the container with id c19930e37fb1eb43da4f5eef7b60a364be1affe7f0161c612fcaabafb66ccaf5 Oct 06 15:14:43 crc kubenswrapper[4757]: I1006 15:14:43.115134 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/crc-debug-6l46n" event={"ID":"546626fe-53f0-4c34-8fb5-a5d42af96231","Type":"ContainerStarted","Data":"341a8e8f6966271ea7804170ab72d6ca20880d13b0857e2cf193037a85d51ae5"} Oct 06 15:14:43 crc kubenswrapper[4757]: I1006 15:14:43.115206 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/crc-debug-6l46n" event={"ID":"546626fe-53f0-4c34-8fb5-a5d42af96231","Type":"ContainerStarted","Data":"c19930e37fb1eb43da4f5eef7b60a364be1affe7f0161c612fcaabafb66ccaf5"} Oct 06 15:14:43 crc kubenswrapper[4757]: I1006 15:14:43.132657 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-25nfh/crc-debug-6l46n" podStartSLOduration=1.132639345 podStartE2EDuration="1.132639345s" podCreationTimestamp="2025-10-06 15:14:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:14:43.127260252 +0000 UTC m=+5771.624578799" watchObservedRunningTime="2025-10-06 15:14:43.132639345 +0000 UTC m=+5771.629957882" Oct 06 15:14:43 crc kubenswrapper[4757]: I1006 15:14:43.180028 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f" Oct 06 15:14:43 crc kubenswrapper[4757]: E1006 15:14:43.180271 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:14:44 crc kubenswrapper[4757]: I1006 15:14:44.125734 4757 generic.go:334] "Generic (PLEG): container finished" podID="546626fe-53f0-4c34-8fb5-a5d42af96231" containerID="341a8e8f6966271ea7804170ab72d6ca20880d13b0857e2cf193037a85d51ae5" exitCode=0 Oct 06 15:14:44 crc kubenswrapper[4757]: I1006 15:14:44.125781 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/crc-debug-6l46n" event={"ID":"546626fe-53f0-4c34-8fb5-a5d42af96231","Type":"ContainerDied","Data":"341a8e8f6966271ea7804170ab72d6ca20880d13b0857e2cf193037a85d51ae5"} Oct 06 15:14:45 crc kubenswrapper[4757]: I1006 15:14:45.253415 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:45 crc kubenswrapper[4757]: I1006 15:14:45.423080 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28c98\" (UniqueName: \"kubernetes.io/projected/546626fe-53f0-4c34-8fb5-a5d42af96231-kube-api-access-28c98\") pod \"546626fe-53f0-4c34-8fb5-a5d42af96231\" (UID: \"546626fe-53f0-4c34-8fb5-a5d42af96231\") " Oct 06 15:14:45 crc kubenswrapper[4757]: I1006 15:14:45.423279 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/546626fe-53f0-4c34-8fb5-a5d42af96231-host\") pod \"546626fe-53f0-4c34-8fb5-a5d42af96231\" (UID: \"546626fe-53f0-4c34-8fb5-a5d42af96231\") " Oct 06 15:14:45 crc kubenswrapper[4757]: I1006 15:14:45.423434 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/546626fe-53f0-4c34-8fb5-a5d42af96231-host" (OuterVolumeSpecName: "host") pod "546626fe-53f0-4c34-8fb5-a5d42af96231" (UID: "546626fe-53f0-4c34-8fb5-a5d42af96231"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 15:14:45 crc kubenswrapper[4757]: I1006 15:14:45.423741 4757 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/546626fe-53f0-4c34-8fb5-a5d42af96231-host\") on node \"crc\" DevicePath \"\"" Oct 06 15:14:45 crc kubenswrapper[4757]: I1006 15:14:45.434305 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/546626fe-53f0-4c34-8fb5-a5d42af96231-kube-api-access-28c98" (OuterVolumeSpecName: "kube-api-access-28c98") pod "546626fe-53f0-4c34-8fb5-a5d42af96231" (UID: "546626fe-53f0-4c34-8fb5-a5d42af96231"). InnerVolumeSpecName "kube-api-access-28c98". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:14:45 crc kubenswrapper[4757]: I1006 15:14:45.524968 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28c98\" (UniqueName: \"kubernetes.io/projected/546626fe-53f0-4c34-8fb5-a5d42af96231-kube-api-access-28c98\") on node \"crc\" DevicePath \"\"" Oct 06 15:14:46 crc kubenswrapper[4757]: I1006 15:14:46.146583 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/crc-debug-6l46n" event={"ID":"546626fe-53f0-4c34-8fb5-a5d42af96231","Type":"ContainerDied","Data":"c19930e37fb1eb43da4f5eef7b60a364be1affe7f0161c612fcaabafb66ccaf5"} Oct 06 15:14:46 crc kubenswrapper[4757]: I1006 15:14:46.147052 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c19930e37fb1eb43da4f5eef7b60a364be1affe7f0161c612fcaabafb66ccaf5" Oct 06 15:14:46 crc kubenswrapper[4757]: I1006 15:14:46.146641 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-6l46n" Oct 06 15:14:48 crc kubenswrapper[4757]: I1006 15:14:48.227308 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jqk5s" Oct 06 15:14:48 crc kubenswrapper[4757]: I1006 15:14:48.227380 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jqk5s" Oct 06 15:14:48 crc kubenswrapper[4757]: I1006 15:14:48.284489 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jqk5s" Oct 06 15:14:49 crc kubenswrapper[4757]: I1006 15:14:49.213148 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jqk5s" Oct 06 15:14:49 crc kubenswrapper[4757]: I1006 15:14:49.254036 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jqk5s"] Oct 06 15:14:50 crc kubenswrapper[4757]: I1006 15:14:50.147723 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25nfh/crc-debug-6l46n"] Oct 06 15:14:50 crc kubenswrapper[4757]: I1006 15:14:50.154315 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25nfh/crc-debug-6l46n"] Oct 06 15:14:50 crc kubenswrapper[4757]: I1006 15:14:50.190505 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="546626fe-53f0-4c34-8fb5-a5d42af96231" path="/var/lib/kubelet/pods/546626fe-53f0-4c34-8fb5-a5d42af96231/volumes" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.195107 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jqk5s" podUID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerName="registry-server" containerID="cri-o://88c912771f55e3b73a3bd9e7b354dd76bf8fa7ce4da0f3b10db9434c1fb18d79" gracePeriod=2 Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.320772 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25nfh/crc-debug-sff4x"] Oct 06 15:14:51 crc kubenswrapper[4757]: E1006 15:14:51.321131 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="546626fe-53f0-4c34-8fb5-a5d42af96231" containerName="container-00" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.321149 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="546626fe-53f0-4c34-8fb5-a5d42af96231" containerName="container-00" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.321353 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="546626fe-53f0-4c34-8fb5-a5d42af96231" containerName="container-00" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.321909 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.424324 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d43ac6-f865-47af-b369-af21e49c92d9-host\") pod \"crc-debug-sff4x\" (UID: \"41d43ac6-f865-47af-b369-af21e49c92d9\") " pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.424528 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5l79\" (UniqueName: \"kubernetes.io/projected/41d43ac6-f865-47af-b369-af21e49c92d9-kube-api-access-h5l79\") pod \"crc-debug-sff4x\" (UID: \"41d43ac6-f865-47af-b369-af21e49c92d9\") " pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.527077 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5l79\" (UniqueName: \"kubernetes.io/projected/41d43ac6-f865-47af-b369-af21e49c92d9-kube-api-access-h5l79\") pod \"crc-debug-sff4x\" (UID: \"41d43ac6-f865-47af-b369-af21e49c92d9\") " pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.527898 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d43ac6-f865-47af-b369-af21e49c92d9-host\") pod \"crc-debug-sff4x\" (UID: \"41d43ac6-f865-47af-b369-af21e49c92d9\") " pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.527982 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d43ac6-f865-47af-b369-af21e49c92d9-host\") pod \"crc-debug-sff4x\" (UID: \"41d43ac6-f865-47af-b369-af21e49c92d9\") " pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.551836 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5l79\" (UniqueName: \"kubernetes.io/projected/41d43ac6-f865-47af-b369-af21e49c92d9-kube-api-access-h5l79\") pod \"crc-debug-sff4x\" (UID: \"41d43ac6-f865-47af-b369-af21e49c92d9\") " pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:51 crc kubenswrapper[4757]: I1006 15:14:51.640568 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:52 crc kubenswrapper[4757]: I1006 15:14:52.204545 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/crc-debug-sff4x" event={"ID":"41d43ac6-f865-47af-b369-af21e49c92d9","Type":"ContainerStarted","Data":"338ee3fc862226b08e464d81d28593ac3df0595f2820ea7f77575a79a4571232"} Oct 06 15:14:53 crc kubenswrapper[4757]: I1006 15:14:53.217247 4757 generic.go:334] "Generic (PLEG): container finished" podID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerID="88c912771f55e3b73a3bd9e7b354dd76bf8fa7ce4da0f3b10db9434c1fb18d79" exitCode=0 Oct 06 15:14:53 crc kubenswrapper[4757]: I1006 15:14:53.217320 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqk5s" event={"ID":"7db35c26-1b14-48ff-bf11-1ad09f811703","Type":"ContainerDied","Data":"88c912771f55e3b73a3bd9e7b354dd76bf8fa7ce4da0f3b10db9434c1fb18d79"} Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.013772 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jqk5s" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.178897 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnjrq\" (UniqueName: \"kubernetes.io/projected/7db35c26-1b14-48ff-bf11-1ad09f811703-kube-api-access-gnjrq\") pod \"7db35c26-1b14-48ff-bf11-1ad09f811703\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.179175 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-catalog-content\") pod \"7db35c26-1b14-48ff-bf11-1ad09f811703\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.179415 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-utilities\") pod \"7db35c26-1b14-48ff-bf11-1ad09f811703\" (UID: \"7db35c26-1b14-48ff-bf11-1ad09f811703\") " Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.181451 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-utilities" (OuterVolumeSpecName: "utilities") pod "7db35c26-1b14-48ff-bf11-1ad09f811703" (UID: "7db35c26-1b14-48ff-bf11-1ad09f811703"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.187853 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7db35c26-1b14-48ff-bf11-1ad09f811703-kube-api-access-gnjrq" (OuterVolumeSpecName: "kube-api-access-gnjrq") pod "7db35c26-1b14-48ff-bf11-1ad09f811703" (UID: "7db35c26-1b14-48ff-bf11-1ad09f811703"). InnerVolumeSpecName "kube-api-access-gnjrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.230385 4757 generic.go:334] "Generic (PLEG): container finished" podID="41d43ac6-f865-47af-b369-af21e49c92d9" containerID="77db08b6eba49cd7b41eee70ed0ec83409e8e247241e312a812b0d02190eabb9" exitCode=0 Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.236464 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jqk5s" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.258075 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/crc-debug-sff4x" event={"ID":"41d43ac6-f865-47af-b369-af21e49c92d9","Type":"ContainerDied","Data":"77db08b6eba49cd7b41eee70ed0ec83409e8e247241e312a812b0d02190eabb9"} Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.258608 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jqk5s" event={"ID":"7db35c26-1b14-48ff-bf11-1ad09f811703","Type":"ContainerDied","Data":"a645e7facab7a79f628a48d1f00bd4263a58aa3d9d1f37bfc91cc53e62a69fe9"} Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.258653 4757 scope.go:117] "RemoveContainer" containerID="88c912771f55e3b73a3bd9e7b354dd76bf8fa7ce4da0f3b10db9434c1fb18d79" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.281552 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnjrq\" (UniqueName: \"kubernetes.io/projected/7db35c26-1b14-48ff-bf11-1ad09f811703-kube-api-access-gnjrq\") on node \"crc\" DevicePath \"\"" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.281587 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.292180 4757 scope.go:117] "RemoveContainer" containerID="48e0d7cc8769182eba47802ecb4e0e6151d740fc29d3f5a391507f82a23be307" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.308749 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25nfh/crc-debug-sff4x"] Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.311405 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25nfh/crc-debug-sff4x"] Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.312087 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7db35c26-1b14-48ff-bf11-1ad09f811703" (UID: "7db35c26-1b14-48ff-bf11-1ad09f811703"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.314125 4757 scope.go:117] "RemoveContainer" containerID="1350c93e519b10d1d175efc839d3aabf039e56a1df396bf658e1ed35bafcc5f4" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.383845 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7db35c26-1b14-48ff-bf11-1ad09f811703-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.586252 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jqk5s"] Oct 06 15:14:54 crc kubenswrapper[4757]: I1006 15:14:54.609722 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jqk5s"] Oct 06 15:14:55 crc kubenswrapper[4757]: I1006 15:14:55.180506 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f" Oct 06 15:14:55 crc kubenswrapper[4757]: E1006 15:14:55.180858 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:14:55 crc kubenswrapper[4757]: I1006 15:14:55.347276 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:55 crc kubenswrapper[4757]: I1006 15:14:55.502154 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d43ac6-f865-47af-b369-af21e49c92d9-host\") pod \"41d43ac6-f865-47af-b369-af21e49c92d9\" (UID: \"41d43ac6-f865-47af-b369-af21e49c92d9\") " Oct 06 15:14:55 crc kubenswrapper[4757]: I1006 15:14:55.502297 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5l79\" (UniqueName: \"kubernetes.io/projected/41d43ac6-f865-47af-b369-af21e49c92d9-kube-api-access-h5l79\") pod \"41d43ac6-f865-47af-b369-af21e49c92d9\" (UID: \"41d43ac6-f865-47af-b369-af21e49c92d9\") " Oct 06 15:14:55 crc kubenswrapper[4757]: I1006 15:14:55.502313 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41d43ac6-f865-47af-b369-af21e49c92d9-host" (OuterVolumeSpecName: "host") pod "41d43ac6-f865-47af-b369-af21e49c92d9" (UID: "41d43ac6-f865-47af-b369-af21e49c92d9"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 06 15:14:55 crc kubenswrapper[4757]: I1006 15:14:55.502688 4757 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/41d43ac6-f865-47af-b369-af21e49c92d9-host\") on node \"crc\" DevicePath \"\"" Oct 06 15:14:55 crc kubenswrapper[4757]: I1006 15:14:55.508140 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41d43ac6-f865-47af-b369-af21e49c92d9-kube-api-access-h5l79" (OuterVolumeSpecName: "kube-api-access-h5l79") pod "41d43ac6-f865-47af-b369-af21e49c92d9" (UID: "41d43ac6-f865-47af-b369-af21e49c92d9"). InnerVolumeSpecName "kube-api-access-h5l79". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:14:55 crc kubenswrapper[4757]: I1006 15:14:55.604361 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5l79\" (UniqueName: \"kubernetes.io/projected/41d43ac6-f865-47af-b369-af21e49c92d9-kube-api-access-h5l79\") on node \"crc\" DevicePath \"\"" Oct 06 15:14:56 crc kubenswrapper[4757]: I1006 15:14:56.190420 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41d43ac6-f865-47af-b369-af21e49c92d9" path="/var/lib/kubelet/pods/41d43ac6-f865-47af-b369-af21e49c92d9/volumes" Oct 06 15:14:56 crc kubenswrapper[4757]: I1006 15:14:56.190920 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7db35c26-1b14-48ff-bf11-1ad09f811703" path="/var/lib/kubelet/pods/7db35c26-1b14-48ff-bf11-1ad09f811703/volumes" Oct 06 15:14:56 crc kubenswrapper[4757]: I1006 15:14:56.254944 4757 scope.go:117] "RemoveContainer" containerID="77db08b6eba49cd7b41eee70ed0ec83409e8e247241e312a812b0d02190eabb9" Oct 06 15:14:56 crc kubenswrapper[4757]: I1006 15:14:56.254994 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25nfh/crc-debug-sff4x" Oct 06 15:14:56 crc kubenswrapper[4757]: I1006 15:14:56.461979 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-5168-account-create-f2vck_3eafe25b-cbf4-46ea-9b52-4b8244574443/mariadb-account-create/0.log" Oct 06 15:14:56 crc kubenswrapper[4757]: I1006 15:14:56.647847 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c9b794d8d-fv647_1e2f444f-a0a5-4204-92d9-e253d6c676a6/barbican-api-log/0.log" Oct 06 15:14:56 crc kubenswrapper[4757]: I1006 15:14:56.672020 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-c9b794d8d-fv647_1e2f444f-a0a5-4204-92d9-e253d6c676a6/barbican-api/0.log" Oct 06 15:14:56 crc kubenswrapper[4757]: I1006 15:14:56.828436 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-db-create-xc9j5_8aceb73c-695c-45ef-a519-3648dc1defda/mariadb-database-create/0.log" Oct 06 15:14:56 crc kubenswrapper[4757]: I1006 15:14:56.973258 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-db-sync-tl5mh_1d640aee-a2ac-4541-9a43-6db175d7ac71/barbican-db-sync/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.027910 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-76b567cf98-2ljlg_96d85f17-d1f2-4f91-a644-0959da17f29e/barbican-keystone-listener/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.044863 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-76b567cf98-2ljlg_96d85f17-d1f2-4f91-a644-0959da17f29e/barbican-keystone-listener-log/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.189150 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-8558c89d89-26z6j_d48cb0f6-a038-4b42-8414-89fd43612859/barbican-worker/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.239188 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-8558c89d89-26z6j_d48cb0f6-a038-4b42-8414-89fd43612859/barbican-worker-log/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.374413 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5bb5b457f7-dd6rq_2c3c9933-3b11-4fc8-a1a2-ef87ba376118/init/0.log" Oct 06 15:14:57 
crc kubenswrapper[4757]: I1006 15:14:57.517637 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5bb5b457f7-dd6rq_2c3c9933-3b11-4fc8-a1a2-ef87ba376118/dnsmasq-dns/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.539355 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5bb5b457f7-dd6rq_2c3c9933-3b11-4fc8-a1a2-ef87ba376118/init/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.580393 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5c5bc6dbbd-g2zwx_15855896-fa59-487b-87b6-37fdfa827188/keystone-api/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.744361 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-b3d9-account-create-sfxqf_698f058d-bce6-40a2-ad66-9cb2a8990e42/mariadb-account-create/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.814052 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-bootstrap-7pmh5_7155f33d-aad8-4689-990c-799c2c1ab159/keystone-bootstrap/0.log" Oct 06 15:14:57 crc kubenswrapper[4757]: I1006 15:14:57.947724 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-create-phk2f_e6d9f5c5-06c1-4fdf-99be-31cd1bc00394/mariadb-database-create/0.log" Oct 06 15:14:58 crc kubenswrapper[4757]: I1006 15:14:58.044520 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-sync-qmc6b_8a6135bf-bb76-48a9-88a5-b6271ebc3bf9/keystone-db-sync/0.log" Oct 06 15:14:58 crc kubenswrapper[4757]: I1006 15:14:58.227010 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_e5f3a4b5-f137-498a-a993-556989a82d82/adoption/0.log" Oct 06 15:14:58 crc kubenswrapper[4757]: I1006 15:14:58.466902 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-528f-account-create-xkcdt_95694796-073a-422c-83d8-97a9a5b7e000/mariadb-account-create/0.log" Oct 06 15:14:58 crc kubenswrapper[4757]: I1006 15:14:58.696906 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-597d785b9-4x6ld_90ffb97a-99b4-4c16-b9dc-6295fb5d17df/neutron-api/0.log" Oct 06 15:14:58 crc kubenswrapper[4757]: I1006 15:14:58.714709 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-597d785b9-4x6ld_90ffb97a-99b4-4c16-b9dc-6295fb5d17df/neutron-httpd/0.log" Oct 06 15:14:59 crc kubenswrapper[4757]: I1006 15:14:59.083384 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-db-create-rvnm9_7fa8484b-cc8e-4b19-a476-d3cec87675a3/mariadb-database-create/0.log" Oct 06 15:14:59 crc kubenswrapper[4757]: I1006 15:14:59.181063 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-db-sync-n59jv_e9b02124-fbb0-4055-9eb0-7e7f39db93e3/neutron-db-sync/0.log" Oct 06 15:14:59 crc kubenswrapper[4757]: I1006 15:14:59.278720 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e/mysql-bootstrap/0.log" Oct 06 15:14:59 crc kubenswrapper[4757]: I1006 15:14:59.481398 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e/mysql-bootstrap/0.log" Oct 06 15:14:59 crc kubenswrapper[4757]: I1006 15:14:59.511806 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_ca425bb4-93e3-4d5c-bf23-d7d8f8bb1c8e/galera/0.log" Oct 06 
15:14:59 crc kubenswrapper[4757]: I1006 15:14:59.695662 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_34dcf335-306d-480f-ac3e-cee391886957/mysql-bootstrap/0.log" Oct 06 15:14:59 crc kubenswrapper[4757]: I1006 15:14:59.930816 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_34dcf335-306d-480f-ac3e-cee391886957/mysql-bootstrap/0.log" Oct 06 15:14:59 crc kubenswrapper[4757]: I1006 15:14:59.952109 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_34dcf335-306d-480f-ac3e-cee391886957/galera/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.153280 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt"] Oct 06 15:15:00 crc kubenswrapper[4757]: E1006 15:15:00.153625 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerName="extract-utilities" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.153641 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerName="extract-utilities" Oct 06 15:15:00 crc kubenswrapper[4757]: E1006 15:15:00.153653 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerName="registry-server" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.153660 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerName="registry-server" Oct 06 15:15:00 crc kubenswrapper[4757]: E1006 15:15:00.153671 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41d43ac6-f865-47af-b369-af21e49c92d9" containerName="container-00" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.153678 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="41d43ac6-f865-47af-b369-af21e49c92d9" containerName="container-00" Oct 06 15:15:00 crc kubenswrapper[4757]: E1006 15:15:00.153695 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerName="extract-content" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.153704 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerName="extract-content" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.153858 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db35c26-1b14-48ff-bf11-1ad09f811703" containerName="registry-server" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.153882 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="41d43ac6-f865-47af-b369-af21e49c92d9" containerName="container-00" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.154465 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.156531 4757 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.156832 4757 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.161137 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_8ba32982-b0f0-4865-9bd6-b249bbbfafd2/openstackclient/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.162795 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt"] Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.197549 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-copy-data_5a471640-27f0-4f88-8488-fae3ef555c54/adoption/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.280863 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-config-volume\") pod \"collect-profiles-29329395-mtmqt\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.280935 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-secret-volume\") pod \"collect-profiles-29329395-mtmqt\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.281005 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dzjp\" (UniqueName: \"kubernetes.io/projected/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-kube-api-access-7dzjp\") pod \"collect-profiles-29329395-mtmqt\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.379478 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1514d2be-d117-4239-bb79-e73114d631fa/openstack-network-exporter/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.380421 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_1514d2be-d117-4239-bb79-e73114d631fa/ovn-northd/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.382196 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dzjp\" (UniqueName: \"kubernetes.io/projected/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-kube-api-access-7dzjp\") pod \"collect-profiles-29329395-mtmqt\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.382277 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-config-volume\") pod \"collect-profiles-29329395-mtmqt\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.382337 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-secret-volume\") pod \"collect-profiles-29329395-mtmqt\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.383671 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-config-volume\") pod \"collect-profiles-29329395-mtmqt\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.399446 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-secret-volume\") pod \"collect-profiles-29329395-mtmqt\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.401214 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dzjp\" (UniqueName: \"kubernetes.io/projected/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-kube-api-access-7dzjp\") pod \"collect-profiles-29329395-mtmqt\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.432107 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_beb86ee0-f17c-4c14-8e47-5dca77324eaa/memcached/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.510072 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.569168 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_95d03e2a-e649-41e0-bddb-9523672e5e9c/openstack-network-exporter/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.575253 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_95d03e2a-e649-41e0-bddb-9523672e5e9c/ovsdbserver-nb/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.716881 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_66e11e3d-a78e-4706-a789-a997c6f73a64/openstack-network-exporter/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.718694 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_66e11e3d-a78e-4706-a789-a997c6f73a64/ovsdbserver-nb/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.811592 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_448d7cb5-8a9c-4430-acf9-e9d8662f9eaf/openstack-network-exporter/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.877627 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_448d7cb5-8a9c-4430-acf9-e9d8662f9eaf/ovsdbserver-nb/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.938886 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_be616da6-c336-4c71-b23c-3137a9a6c9ff/openstack-network-exporter/0.log" Oct 06 15:15:00 crc kubenswrapper[4757]: I1006 15:15:00.980237 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt"] Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.005348 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_be616da6-c336-4c71-b23c-3137a9a6c9ff/ovsdbserver-sb/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.091785 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_65a44faf-fdf2-4ef5-9af8-6c31e0d9240b/openstack-network-exporter/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.136078 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_65a44faf-fdf2-4ef5-9af8-6c31e0d9240b/ovsdbserver-sb/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.236665 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_1fbe1234-296b-41ce-a03e-fde5b9abc505/openstack-network-exporter/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.279505 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_1fbe1234-296b-41ce-a03e-fde5b9abc505/ovsdbserver-sb/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.296184 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" event={"ID":"4d63ca01-dff7-48a1-8f3d-73a0adc96b02","Type":"ContainerStarted","Data":"a6feec71f6e89d022edf12d30929be78188691b850ebe8b83a7bde63d7881b90"} Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.296220 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" 
event={"ID":"4d63ca01-dff7-48a1-8f3d-73a0adc96b02","Type":"ContainerStarted","Data":"0b1731b8dba4861201051f0298dfe4a6455f1dd9368cb03ead1164f661bbb3cf"} Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.313773 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" podStartSLOduration=1.313749773 podStartE2EDuration="1.313749773s" podCreationTimestamp="2025-10-06 15:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-06 15:15:01.311983355 +0000 UTC m=+5789.809301892" watchObservedRunningTime="2025-10-06 15:15:01.313749773 +0000 UTC m=+5789.811068300" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.365577 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_534874a5-63a4-4cd0-ab0d-27bea909d8a4/setup-container/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.528907 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_534874a5-63a4-4cd0-ab0d-27bea909d8a4/rabbitmq/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.551001 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_881e264b-7f54-49d0-8e26-c7eda5e6ab5a/setup-container/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.553032 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_534874a5-63a4-4cd0-ab0d-27bea909d8a4/setup-container/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.745226 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_881e264b-7f54-49d0-8e26-c7eda5e6ab5a/rabbitmq/0.log" Oct 06 15:15:01 crc kubenswrapper[4757]: I1006 15:15:01.756080 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_881e264b-7f54-49d0-8e26-c7eda5e6ab5a/setup-container/0.log" Oct 06 15:15:02 crc kubenswrapper[4757]: I1006 15:15:02.305058 4757 generic.go:334] "Generic (PLEG): container finished" podID="4d63ca01-dff7-48a1-8f3d-73a0adc96b02" containerID="a6feec71f6e89d022edf12d30929be78188691b850ebe8b83a7bde63d7881b90" exitCode=0 Oct 06 15:15:02 crc kubenswrapper[4757]: I1006 15:15:02.305124 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" event={"ID":"4d63ca01-dff7-48a1-8f3d-73a0adc96b02","Type":"ContainerDied","Data":"a6feec71f6e89d022edf12d30929be78188691b850ebe8b83a7bde63d7881b90"} Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.646169 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.737691 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dzjp\" (UniqueName: \"kubernetes.io/projected/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-kube-api-access-7dzjp\") pod \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.737773 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-secret-volume\") pod \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.737829 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-config-volume\") pod \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\" (UID: \"4d63ca01-dff7-48a1-8f3d-73a0adc96b02\") " Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.738669 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-config-volume" (OuterVolumeSpecName: "config-volume") pod "4d63ca01-dff7-48a1-8f3d-73a0adc96b02" (UID: "4d63ca01-dff7-48a1-8f3d-73a0adc96b02"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.748343 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4d63ca01-dff7-48a1-8f3d-73a0adc96b02" (UID: "4d63ca01-dff7-48a1-8f3d-73a0adc96b02"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.748369 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-kube-api-access-7dzjp" (OuterVolumeSpecName: "kube-api-access-7dzjp") pod "4d63ca01-dff7-48a1-8f3d-73a0adc96b02" (UID: "4d63ca01-dff7-48a1-8f3d-73a0adc96b02"). InnerVolumeSpecName "kube-api-access-7dzjp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.839633 4757 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.839665 4757 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-config-volume\") on node \"crc\" DevicePath \"\"" Oct 06 15:15:03 crc kubenswrapper[4757]: I1006 15:15:03.839676 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dzjp\" (UniqueName: \"kubernetes.io/projected/4d63ca01-dff7-48a1-8f3d-73a0adc96b02-kube-api-access-7dzjp\") on node \"crc\" DevicePath \"\"" Oct 06 15:15:04 crc kubenswrapper[4757]: I1006 15:15:04.323973 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" event={"ID":"4d63ca01-dff7-48a1-8f3d-73a0adc96b02","Type":"ContainerDied","Data":"0b1731b8dba4861201051f0298dfe4a6455f1dd9368cb03ead1164f661bbb3cf"} Oct 06 15:15:04 crc kubenswrapper[4757]: I1006 15:15:04.324297 4757 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b1731b8dba4861201051f0298dfe4a6455f1dd9368cb03ead1164f661bbb3cf" Oct 06 15:15:04 crc kubenswrapper[4757]: I1006 15:15:04.324033 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29329395-mtmqt" Oct 06 15:15:04 crc kubenswrapper[4757]: I1006 15:15:04.382079 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"] Oct 06 15:15:04 crc kubenswrapper[4757]: I1006 15:15:04.389329 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29329350-85dn6"] Oct 06 15:15:06 crc kubenswrapper[4757]: I1006 15:15:06.189559 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b" path="/var/lib/kubelet/pods/c2108ec2-dc1c-4ab0-9fd4-1f3eaa56701b/volumes" Oct 06 15:15:09 crc kubenswrapper[4757]: I1006 15:15:09.180663 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f" Oct 06 15:15:09 crc kubenswrapper[4757]: E1006 15:15:09.182504 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:15:15 crc kubenswrapper[4757]: I1006 15:15:15.925228 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8_90ed9755-badd-4897-be59-4ffb24a37b83/util/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.085306 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8_90ed9755-badd-4897-be59-4ffb24a37b83/util/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.136817 4757 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8_90ed9755-badd-4897-be59-4ffb24a37b83/pull/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.140236 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8_90ed9755-badd-4897-be59-4ffb24a37b83/pull/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.301109 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8_90ed9755-badd-4897-be59-4ffb24a37b83/pull/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.301228 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8_90ed9755-badd-4897-be59-4ffb24a37b83/util/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.330654 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_15a1c145fda98e6dff4d00edd620a8374d6690a1bd22f46b75c47a2ff9gxhn8_90ed9755-badd-4897-be59-4ffb24a37b83/extract/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.527071 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f56ff694-bwzkj_44a67ef5-395a-4ba1-8572-847e4bc6e4a1/kube-rbac-proxy/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.547595 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-84bd8f6848-cm2d5_b539d43b-476d-46dc-944b-47ba64f84566/kube-rbac-proxy/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.570432 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-64f56ff694-bwzkj_44a67ef5-395a-4ba1-8572-847e4bc6e4a1/manager/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.768878 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-58d86cd59d-8dq6w_e6876774-af7e-4e5e-bed0-db5f2ac668b9/manager/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.770949 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-84bd8f6848-cm2d5_b539d43b-476d-46dc-944b-47ba64f84566/manager/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.809303 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-58d86cd59d-8dq6w_e6876774-af7e-4e5e-bed0-db5f2ac668b9/kube-rbac-proxy/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.951140 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-fd648f65-rbrjz_deeb9237-8930-41f6-94e5-bd012cca4f95/kube-rbac-proxy/0.log" Oct 06 15:15:16 crc kubenswrapper[4757]: I1006 15:15:16.994616 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-fd648f65-rbrjz_deeb9237-8930-41f6-94e5-bd012cca4f95/manager/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.130730 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-7ccfc8cf49-np2qg_5f9c96a5-1d49-48a6-89d4-56fdea632598/kube-rbac-proxy/0.log" Oct 06 15:15:17 
crc kubenswrapper[4757]: I1006 15:15:17.182349 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-7ccfc8cf49-np2qg_5f9c96a5-1d49-48a6-89d4-56fdea632598/manager/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.203562 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5b477879bc-7z4cc_28e3257e-3a28-4a92-8658-936b70dd1b79/kube-rbac-proxy/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.321902 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5b477879bc-7z4cc_28e3257e-3a28-4a92-8658-936b70dd1b79/manager/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.405086 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-84788b6bc5-ghbgp_ec5d9f24-e11c-4966-b85d-cfb960db9568/kube-rbac-proxy/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.599425 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-84788b6bc5-ghbgp_ec5d9f24-e11c-4966-b85d-cfb960db9568/manager/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.609630 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5467f8988c-w9lhz_904d882e-dbf9-4f3c-897c-6cacf5a38057/kube-rbac-proxy/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.676459 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5467f8988c-w9lhz_904d882e-dbf9-4f3c-897c-6cacf5a38057/manager/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.791206 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5b84cc7657-2lh74_370a8b4d-2c86-4920-91c9-19834ea66e0e/kube-rbac-proxy/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.874349 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5b84cc7657-2lh74_370a8b4d-2c86-4920-91c9-19834ea66e0e/manager/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.941607 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7cb48dbc-k7npw_0c353b7c-52c9-490a-ab96-7d47a1dae189/kube-rbac-proxy/0.log" Oct 06 15:15:17 crc kubenswrapper[4757]: I1006 15:15:17.986037 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7cb48dbc-k7npw_0c353b7c-52c9-490a-ab96-7d47a1dae189/manager/0.log" Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.092409 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-d6c9dc5bc-tsk8p_9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc/kube-rbac-proxy/0.log" Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.137503 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-d6c9dc5bc-tsk8p_9f02bbb6-9980-4cb4-9811-5ab86eb2e6cc/manager/0.log" Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.247541 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-69b956fbf6-49zhk_ef26c912-a862-423b-8e2f-2ad84c392e16/kube-rbac-proxy/0.log" 
Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.343596 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-69b956fbf6-49zhk_ef26c912-a862-423b-8e2f-2ad84c392e16/manager/0.log"
Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.383749 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6c9b57c67-qqbjc_52d15369-93d1-4240-9c8a-22fc91e31b2b/kube-rbac-proxy/0.log"
Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.501424 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-6c9b57c67-qqbjc_52d15369-93d1-4240-9c8a-22fc91e31b2b/manager/0.log"
Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.560073 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69f59f9d8-7qc5n_4b01c123-c126-49d3-a5f7-4f85d7d9466a/kube-rbac-proxy/0.log"
Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.606180 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69f59f9d8-7qc5n_4b01c123-c126-49d3-a5f7-4f85d7d9466a/manager/0.log"
Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.766530 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84784cd75d696n7_518b074b-8b71-41d4-9105-b13aa20e3901/manager/0.log"
Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.831515 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84784cd75d696n7_518b074b-8b71-41d4-9105-b13aa20e3901/kube-rbac-proxy/0.log"
Oct 06 15:15:18 crc kubenswrapper[4757]: I1006 15:15:18.867903 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-867844c698-w92jl_04761044-c4c0-4fcf-9229-4050ef0e64da/kube-rbac-proxy/0.log"
Oct 06 15:15:19 crc kubenswrapper[4757]: I1006 15:15:19.048580 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6975b4f4b9-44lxq_8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8/kube-rbac-proxy/0.log"
Oct 06 15:15:19 crc kubenswrapper[4757]: I1006 15:15:19.276691 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6975b4f4b9-44lxq_8582ecb4-98c5-4542-8ffc-d5ff7e8ce6d8/operator/0.log"
Oct 06 15:15:19 crc kubenswrapper[4757]: I1006 15:15:19.319551 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-54d485fd9-kqt4h_b30674df-1f48-4850-ae2c-7c464a84afea/kube-rbac-proxy/0.log"
Oct 06 15:15:19 crc kubenswrapper[4757]: I1006 15:15:19.322025 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-ddqvx_7164f700-e389-47c2-b3a7-2e4afe8ebc36/registry-server/0.log"
Oct 06 15:15:19 crc kubenswrapper[4757]: I1006 15:15:19.535259 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-54d485fd9-kqt4h_b30674df-1f48-4850-ae2c-7c464a84afea/manager/0.log"
Oct 06 15:15:19 crc kubenswrapper[4757]: I1006 15:15:19.569833 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-66f6d6849b-pwcvf_8c111ee9-f736-4f86-98b6-590b9adfa58f/manager/0.log"
Oct 06 15:15:19 crc kubenswrapper[4757]: I1006 15:15:19.581715 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-66f6d6849b-pwcvf_8c111ee9-f736-4f86-98b6-590b9adfa58f/kube-rbac-proxy/0.log"
Oct 06 15:15:19 crc kubenswrapper[4757]: I1006 15:15:19.846496 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-c7h67_a1958ae9-ac1a-4ec6-9801-4c9c48f3c37c/operator/0.log"
Oct 06 15:15:19 crc kubenswrapper[4757]: I1006 15:15:19.852181 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-76d5577b-szvb9_045efd67-1f80-49d4-be81-46310d11b717/kube-rbac-proxy/0.log"
Oct 06 15:15:20 crc kubenswrapper[4757]: I1006 15:15:20.053656 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-76d5577b-szvb9_045efd67-1f80-49d4-be81-46310d11b717/manager/0.log"
Oct 06 15:15:20 crc kubenswrapper[4757]: I1006 15:15:20.068023 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-867844c698-w92jl_04761044-c4c0-4fcf-9229-4050ef0e64da/manager/0.log"
Oct 06 15:15:20 crc kubenswrapper[4757]: I1006 15:15:20.069687 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-f589c7597-cnfmr_f328cb34-f1aa-49c7-8b79-c465fa4cd522/kube-rbac-proxy/0.log"
Oct 06 15:15:20 crc kubenswrapper[4757]: I1006 15:15:20.116108 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-f589c7597-cnfmr_f328cb34-f1aa-49c7-8b79-c465fa4cd522/manager/0.log"
Oct 06 15:15:20 crc kubenswrapper[4757]: I1006 15:15:20.235002 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb6dcddc-g9hjb_37d8e975-46d7-46cd-8d20-4229c7ffaada/kube-rbac-proxy/0.log"
Oct 06 15:15:20 crc kubenswrapper[4757]: I1006 15:15:20.239516 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6bb6dcddc-g9hjb_37d8e975-46d7-46cd-8d20-4229c7ffaada/manager/0.log"
Oct 06 15:15:20 crc kubenswrapper[4757]: I1006 15:15:20.309426 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5d98cc5575-shmfd_fa3aa8e8-3594-4e41-afa7-d59a965cde23/kube-rbac-proxy/0.log"
Oct 06 15:15:20 crc kubenswrapper[4757]: I1006 15:15:20.417044 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-5d98cc5575-shmfd_fa3aa8e8-3594-4e41-afa7-d59a965cde23/manager/0.log"
Oct 06 15:15:22 crc kubenswrapper[4757]: I1006 15:15:22.186500 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:15:22 crc kubenswrapper[4757]: E1006 15:15:22.189867 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:15:34 crc kubenswrapper[4757]: I1006 15:15:34.957877 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-bg97k_10a2c372-b600-4c78-a4d5-22f5f8c1e425/control-plane-machine-set-operator/0.log"
Oct 06 15:15:35 crc kubenswrapper[4757]: I1006 15:15:35.143378 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vtmh7_37719f06-ec78-403f-81c3-d67831d1ce01/machine-api-operator/0.log"
Oct 06 15:15:35 crc kubenswrapper[4757]: I1006 15:15:35.155876 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vtmh7_37719f06-ec78-403f-81c3-d67831d1ce01/kube-rbac-proxy/0.log"
Oct 06 15:15:36 crc kubenswrapper[4757]: I1006 15:15:36.179974 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:15:36 crc kubenswrapper[4757]: E1006 15:15:36.180575 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:15:38 crc kubenswrapper[4757]: I1006 15:15:38.529933 4757 scope.go:117] "RemoveContainer" containerID="118ce4b8f05c7c8614498377bd6cc066ae817cb5df2570545d087f27510a9248"
Oct 06 15:15:47 crc kubenswrapper[4757]: I1006 15:15:47.037836 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-7d4cc89fcb-wl5f4_88d7e4e8-73b8-4d37-9e0d-3fc2cfc95528/cert-manager-controller/0.log"
Oct 06 15:15:47 crc kubenswrapper[4757]: I1006 15:15:47.226789 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-d969966f-xx877_f261a0cf-2cfa-452a-9366-430ae1d09fac/cert-manager-webhook/0.log"
Oct 06 15:15:47 crc kubenswrapper[4757]: I1006 15:15:47.232696 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7d9f95dbf-wz4lh_1e270c4f-567e-4a7f-8693-0fc471447e47/cert-manager-cainjector/0.log"
Oct 06 15:15:51 crc kubenswrapper[4757]: I1006 15:15:51.180873 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:15:51 crc kubenswrapper[4757]: E1006 15:15:51.181507 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:15:58 crc kubenswrapper[4757]: I1006 15:15:58.621957 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-ccdp9_c6cb2b63-6ae0-4d31-8b00-de6ffff21df7/nmstate-console-plugin/0.log"
Oct 06 15:15:58 crc kubenswrapper[4757]: I1006 15:15:58.846804 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-xw9cw_d715a33c-ce03-4762-8091-b73d2b53f929/nmstate-handler/0.log"
Oct 06 15:15:58 crc kubenswrapper[4757]: I1006 15:15:58.932900 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-lhkl6_54800f67-40f0-4ea1-bc26-76baed5d2663/kube-rbac-proxy/0.log"
Oct 06 15:15:58 crc kubenswrapper[4757]: I1006 15:15:58.980109 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-lhkl6_54800f67-40f0-4ea1-bc26-76baed5d2663/nmstate-metrics/0.log"
Oct 06 15:15:59 crc kubenswrapper[4757]: I1006 15:15:59.043308 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-skdq9_28c42384-fa75-491c-a8a8-1b66cacf2c04/nmstate-operator/0.log"
Oct 06 15:15:59 crc kubenswrapper[4757]: I1006 15:15:59.215900 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-db26x_eb37b835-d99b-4893-a0a2-659afa1391e4/nmstate-webhook/0.log"
Oct 06 15:16:04 crc kubenswrapper[4757]: I1006 15:16:04.180179 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:16:04 crc kubenswrapper[4757]: E1006 15:16:04.180826 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.107183 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-8sfhp_7ad4c4b7-577f-4996-a45d-36026559eb4d/kube-rbac-proxy/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.291951 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-frr-files/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.487151 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-8sfhp_7ad4c4b7-577f-4996-a45d-36026559eb4d/controller/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.634153 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-metrics/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.648650 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-frr-files/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.703953 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-reloader/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.704116 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-reloader/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.942367 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-frr-files/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.979206 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-metrics/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.995378 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-metrics/0.log"
Oct 06 15:16:13 crc kubenswrapper[4757]: I1006 15:16:13.999341 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-reloader/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.149036 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-frr-files/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.155047 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-metrics/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.166132 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/controller/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.193670 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/cp-reloader/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.350838 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/frr-metrics/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.358402 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/kube-rbac-proxy/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.443861 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/kube-rbac-proxy-frr/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.578798 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/reloader/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.650038 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-f9gxl_0f74c411-c34f-47f6-aab7-3c80d0e9cbeb/frr-k8s-webhook-server/0.log"
Oct 06 15:16:14 crc kubenswrapper[4757]: I1006 15:16:14.883975 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-77db4df477-bh7dw_146e21cb-4669-41fc-9356-be297ab190fc/manager/0.log"
Oct 06 15:16:15 crc kubenswrapper[4757]: I1006 15:16:15.040276 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-65787fbbf-zc897_75252c24-aec1-4169-9b9b-c7041274bb4d/webhook-server/0.log"
Oct 06 15:16:15 crc kubenswrapper[4757]: I1006 15:16:15.139031 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-2cj4z_f17e2038-326e-47e1-93c1-5d691c69bd16/kube-rbac-proxy/0.log"
Oct 06 15:16:15 crc kubenswrapper[4757]: I1006 15:16:15.776083 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-2cj4z_f17e2038-326e-47e1-93c1-5d691c69bd16/speaker/0.log"
Oct 06 15:16:16 crc kubenswrapper[4757]: I1006 15:16:16.079922 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-q49jh_07dfce6e-a6a9-44b4-a3c0-2f8778c6309e/frr/0.log"
Oct 06 15:16:19 crc kubenswrapper[4757]: I1006 15:16:19.180666 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:16:19 crc kubenswrapper[4757]: E1006 15:16:19.182681 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.079076 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp_e7df9b75-6121-46ca-89f6-963c9665b8cf/util/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.241519 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp_e7df9b75-6121-46ca-89f6-963c9665b8cf/util/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.258399 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp_e7df9b75-6121-46ca-89f6-963c9665b8cf/pull/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.292096 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp_e7df9b75-6121-46ca-89f6-963c9665b8cf/pull/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.439056 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp_e7df9b75-6121-46ca-89f6-963c9665b8cf/util/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.456985 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp_e7df9b75-6121-46ca-89f6-963c9665b8cf/pull/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.463208 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69kqvhp_e7df9b75-6121-46ca-89f6-963c9665b8cf/extract/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.598396 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl_e2bebc59-e950-44d3-9b34-e9ae757735ed/util/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.781367 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl_e2bebc59-e950-44d3-9b34-e9ae757735ed/util/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.809192 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl_e2bebc59-e950-44d3-9b34-e9ae757735ed/pull/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.809247 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl_e2bebc59-e950-44d3-9b34-e9ae757735ed/pull/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.962437 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl_e2bebc59-e950-44d3-9b34-e9ae757735ed/extract/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.974837 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl_e2bebc59-e950-44d3-9b34-e9ae757735ed/util/0.log"
Oct 06 15:16:27 crc kubenswrapper[4757]: I1006 15:16:27.998891 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d2f5lgl_e2bebc59-e950-44d3-9b34-e9ae757735ed/pull/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.137139 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5ctws_bc03633f-fb13-433b-a4a5-6cf688d6d60e/extract-utilities/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.290081 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5ctws_bc03633f-fb13-433b-a4a5-6cf688d6d60e/extract-utilities/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.316224 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5ctws_bc03633f-fb13-433b-a4a5-6cf688d6d60e/extract-content/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.324914 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5ctws_bc03633f-fb13-433b-a4a5-6cf688d6d60e/extract-content/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.473763 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5ctws_bc03633f-fb13-433b-a4a5-6cf688d6d60e/extract-utilities/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.534596 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5ctws_bc03633f-fb13-433b-a4a5-6cf688d6d60e/extract-content/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.690877 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hsbt_055ac91e-78ce-47e5-91b2-cca82eb82bee/extract-utilities/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.903708 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-5ctws_bc03633f-fb13-433b-a4a5-6cf688d6d60e/registry-server/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.963787 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hsbt_055ac91e-78ce-47e5-91b2-cca82eb82bee/extract-utilities/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.973466 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hsbt_055ac91e-78ce-47e5-91b2-cca82eb82bee/extract-content/0.log"
Oct 06 15:16:28 crc kubenswrapper[4757]: I1006 15:16:28.979402 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hsbt_055ac91e-78ce-47e5-91b2-cca82eb82bee/extract-content/0.log"
Oct 06 15:16:29 crc kubenswrapper[4757]: I1006 15:16:29.116406 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hsbt_055ac91e-78ce-47e5-91b2-cca82eb82bee/extract-utilities/0.log"
Oct 06 15:16:29 crc kubenswrapper[4757]: I1006 15:16:29.256380 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hsbt_055ac91e-78ce-47e5-91b2-cca82eb82bee/extract-content/0.log"
Oct 06 15:16:29 crc kubenswrapper[4757]: I1006 15:16:29.351255 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv_1f413cfe-e073-451c-815c-a246cea1099e/util/0.log"
Oct 06 15:16:29 crc kubenswrapper[4757]: I1006 15:16:29.560124 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv_1f413cfe-e073-451c-815c-a246cea1099e/pull/0.log"
Oct 06 15:16:29 crc kubenswrapper[4757]: I1006 15:16:29.567894 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv_1f413cfe-e073-451c-815c-a246cea1099e/pull/0.log"
Oct 06 15:16:29 crc kubenswrapper[4757]: I1006 15:16:29.602978 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv_1f413cfe-e073-451c-815c-a246cea1099e/util/0.log"
Oct 06 15:16:29 crc kubenswrapper[4757]: I1006 15:16:29.811211 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv_1f413cfe-e073-451c-815c-a246cea1099e/util/0.log"
Oct 06 15:16:29 crc kubenswrapper[4757]: I1006 15:16:29.818210 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv_1f413cfe-e073-451c-815c-a246cea1099e/extract/0.log"
Oct 06 15:16:29 crc kubenswrapper[4757]: I1006 15:16:29.836466 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835cnfqkv_1f413cfe-e073-451c-815c-a246cea1099e/pull/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.105838 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-gfrpz_3c7cc53d-90b4-4fc3-8993-2648eb34abf2/marketplace-operator/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.181098 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:16:30 crc kubenswrapper[4757]: E1006 15:16:30.181388 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.181729 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-6hsbt_055ac91e-78ce-47e5-91b2-cca82eb82bee/registry-server/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.229965 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hfhqn_2803bf75-1028-41a4-9533-e4ec37c0872a/extract-utilities/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.408123 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hfhqn_2803bf75-1028-41a4-9533-e4ec37c0872a/extract-content/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.426362 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hfhqn_2803bf75-1028-41a4-9533-e4ec37c0872a/extract-utilities/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.447929 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hfhqn_2803bf75-1028-41a4-9533-e4ec37c0872a/extract-content/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.593892 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hfhqn_2803bf75-1028-41a4-9533-e4ec37c0872a/extract-content/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.607087 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hfhqn_2803bf75-1028-41a4-9533-e4ec37c0872a/extract-utilities/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.819986 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ph72x_da902388-6f72-4fc4-9a8e-da911b563ecb/extract-utilities/0.log"
Oct 06 15:16:30 crc kubenswrapper[4757]: I1006 15:16:30.843530 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-hfhqn_2803bf75-1028-41a4-9533-e4ec37c0872a/registry-server/0.log"
Oct 06 15:16:31 crc kubenswrapper[4757]: I1006 15:16:31.014718 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ph72x_da902388-6f72-4fc4-9a8e-da911b563ecb/extract-content/0.log"
Oct 06 15:16:31 crc kubenswrapper[4757]: I1006 15:16:31.026573 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ph72x_da902388-6f72-4fc4-9a8e-da911b563ecb/extract-content/0.log"
Oct 06 15:16:31 crc kubenswrapper[4757]: I1006 15:16:31.033128 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ph72x_da902388-6f72-4fc4-9a8e-da911b563ecb/extract-utilities/0.log"
Oct 06 15:16:31 crc kubenswrapper[4757]: I1006 15:16:31.168365 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ph72x_da902388-6f72-4fc4-9a8e-da911b563ecb/extract-content/0.log"
Oct 06 15:16:31 crc kubenswrapper[4757]: I1006 15:16:31.210183 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ph72x_da902388-6f72-4fc4-9a8e-da911b563ecb/extract-utilities/0.log"
Oct 06 15:16:31 crc kubenswrapper[4757]: I1006 15:16:31.638684 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-ph72x_da902388-6f72-4fc4-9a8e-da911b563ecb/registry-server/0.log"
Oct 06 15:16:44 crc kubenswrapper[4757]: I1006 15:16:44.180376 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:16:44 crc kubenswrapper[4757]: E1006 15:16:44.181134 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:16:47 crc kubenswrapper[4757]: I1006 15:16:47.893765 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7n9hl"]
Oct 06 15:16:47 crc kubenswrapper[4757]: E1006 15:16:47.894604 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d63ca01-dff7-48a1-8f3d-73a0adc96b02" containerName="collect-profiles"
Oct 06 15:16:47 crc kubenswrapper[4757]: I1006 15:16:47.894617 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d63ca01-dff7-48a1-8f3d-73a0adc96b02" containerName="collect-profiles"
Oct 06 15:16:47 crc kubenswrapper[4757]: I1006 15:16:47.894771 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d63ca01-dff7-48a1-8f3d-73a0adc96b02" containerName="collect-profiles"
Oct 06 15:16:47 crc kubenswrapper[4757]: I1006 15:16:47.895901 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:47 crc kubenswrapper[4757]: I1006 15:16:47.910480 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7n9hl"]
Oct 06 15:16:47 crc kubenswrapper[4757]: I1006 15:16:47.979443 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-utilities\") pod \"community-operators-7n9hl\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") " pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:47 crc kubenswrapper[4757]: I1006 15:16:47.979496 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn267\" (UniqueName: \"kubernetes.io/projected/7726637f-1e62-4443-a074-bc56f793171f-kube-api-access-fn267\") pod \"community-operators-7n9hl\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") " pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:47 crc kubenswrapper[4757]: I1006 15:16:47.979523 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-catalog-content\") pod \"community-operators-7n9hl\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") " pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:48 crc kubenswrapper[4757]: I1006 15:16:48.086351 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-utilities\") pod \"community-operators-7n9hl\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") " pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:48 crc kubenswrapper[4757]: I1006 15:16:48.086614 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn267\" (UniqueName: \"kubernetes.io/projected/7726637f-1e62-4443-a074-bc56f793171f-kube-api-access-fn267\") pod \"community-operators-7n9hl\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") " pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:48 crc kubenswrapper[4757]: I1006 15:16:48.086651 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-catalog-content\") pod \"community-operators-7n9hl\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") " pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:48 crc kubenswrapper[4757]: I1006 15:16:48.087445 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-catalog-content\") pod \"community-operators-7n9hl\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") " pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:48 crc kubenswrapper[4757]: I1006 15:16:48.087770 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-utilities\") pod \"community-operators-7n9hl\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") " pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:48 crc kubenswrapper[4757]: I1006 15:16:48.117310 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn267\" (UniqueName: \"kubernetes.io/projected/7726637f-1e62-4443-a074-bc56f793171f-kube-api-access-fn267\") pod \"community-operators-7n9hl\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") " pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:48 crc kubenswrapper[4757]: I1006 15:16:48.215211 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:49 crc kubenswrapper[4757]: I1006 15:16:49.337820 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7n9hl"]
Oct 06 15:16:50 crc kubenswrapper[4757]: I1006 15:16:50.214143 4757 generic.go:334] "Generic (PLEG): container finished" podID="7726637f-1e62-4443-a074-bc56f793171f" containerID="b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6" exitCode=0
Oct 06 15:16:50 crc kubenswrapper[4757]: I1006 15:16:50.214250 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7n9hl" event={"ID":"7726637f-1e62-4443-a074-bc56f793171f","Type":"ContainerDied","Data":"b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6"}
Oct 06 15:16:50 crc kubenswrapper[4757]: I1006 15:16:50.214401 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7n9hl" event={"ID":"7726637f-1e62-4443-a074-bc56f793171f","Type":"ContainerStarted","Data":"7c7bbe5c95415e302fda36d97b8001103f9a60483a3f24998a46de20e99b0752"}
Oct 06 15:16:52 crc kubenswrapper[4757]: I1006 15:16:52.229026 4757 generic.go:334] "Generic (PLEG): container finished" podID="7726637f-1e62-4443-a074-bc56f793171f" containerID="170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3" exitCode=0
Oct 06 15:16:52 crc kubenswrapper[4757]: I1006 15:16:52.229232 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7n9hl" event={"ID":"7726637f-1e62-4443-a074-bc56f793171f","Type":"ContainerDied","Data":"170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3"}
Oct 06 15:16:53 crc kubenswrapper[4757]: I1006 15:16:53.243695 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7n9hl" event={"ID":"7726637f-1e62-4443-a074-bc56f793171f","Type":"ContainerStarted","Data":"2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad"}
Oct 06 15:16:53 crc kubenswrapper[4757]: I1006 15:16:53.272817 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7n9hl" podStartSLOduration=3.458876213 podStartE2EDuration="6.272799871s" podCreationTimestamp="2025-10-06 15:16:47 +0000 UTC" firstStartedPulling="2025-10-06 15:16:50.215737946 +0000 UTC m=+5898.713056473" lastFinishedPulling="2025-10-06 15:16:53.029661594 +0000 UTC m=+5901.526980131" observedRunningTime="2025-10-06 15:16:53.265217788 +0000 UTC m=+5901.762536325" watchObservedRunningTime="2025-10-06 15:16:53.272799871 +0000 UTC m=+5901.770118408"
Oct 06 15:16:57 crc kubenswrapper[4757]: I1006 15:16:57.180235 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:16:57 crc kubenswrapper[4757]: E1006 15:16:57.180990 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:16:58 crc kubenswrapper[4757]: I1006 15:16:58.215995 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:58 crc kubenswrapper[4757]: I1006 15:16:58.216338 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:58 crc kubenswrapper[4757]: I1006 15:16:58.258998 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:58 crc kubenswrapper[4757]: I1006 15:16:58.329205 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:16:58 crc kubenswrapper[4757]: I1006 15:16:58.487719 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7n9hl"]
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.064271 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-phk2f"]
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.070540 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-phk2f"]
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.191373 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6d9f5c5-06c1-4fdf-99be-31cd1bc00394" path="/var/lib/kubelet/pods/e6d9f5c5-06c1-4fdf-99be-31cd1bc00394/volumes"
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.304713 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7n9hl" podUID="7726637f-1e62-4443-a074-bc56f793171f" containerName="registry-server" containerID="cri-o://2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad" gracePeriod=2
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.763907 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.925415 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-utilities\") pod \"7726637f-1e62-4443-a074-bc56f793171f\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") "
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.925491 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fn267\" (UniqueName: \"kubernetes.io/projected/7726637f-1e62-4443-a074-bc56f793171f-kube-api-access-fn267\") pod \"7726637f-1e62-4443-a074-bc56f793171f\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") "
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.925688 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-catalog-content\") pod \"7726637f-1e62-4443-a074-bc56f793171f\" (UID: \"7726637f-1e62-4443-a074-bc56f793171f\") "
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.926415 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-utilities" (OuterVolumeSpecName: "utilities") pod "7726637f-1e62-4443-a074-bc56f793171f" (UID: "7726637f-1e62-4443-a074-bc56f793171f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.946844 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7726637f-1e62-4443-a074-bc56f793171f-kube-api-access-fn267" (OuterVolumeSpecName: "kube-api-access-fn267") pod "7726637f-1e62-4443-a074-bc56f793171f" (UID: "7726637f-1e62-4443-a074-bc56f793171f"). InnerVolumeSpecName "kube-api-access-fn267". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 06 15:17:00 crc kubenswrapper[4757]: I1006 15:17:00.975773 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7726637f-1e62-4443-a074-bc56f793171f" (UID: "7726637f-1e62-4443-a074-bc56f793171f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.028212 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-utilities\") on node \"crc\" DevicePath \"\""
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.028242 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fn267\" (UniqueName: \"kubernetes.io/projected/7726637f-1e62-4443-a074-bc56f793171f-kube-api-access-fn267\") on node \"crc\" DevicePath \"\""
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.028251 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7726637f-1e62-4443-a074-bc56f793171f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.314392 4757 generic.go:334] "Generic (PLEG): container finished" podID="7726637f-1e62-4443-a074-bc56f793171f" containerID="2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad" exitCode=0
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.314432 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7n9hl" event={"ID":"7726637f-1e62-4443-a074-bc56f793171f","Type":"ContainerDied","Data":"2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad"}
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.314464 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7n9hl" event={"ID":"7726637f-1e62-4443-a074-bc56f793171f","Type":"ContainerDied","Data":"7c7bbe5c95415e302fda36d97b8001103f9a60483a3f24998a46de20e99b0752"}
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.314481 4757 scope.go:117] "RemoveContainer" containerID="2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad"
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.315544 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7n9hl"
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.333015 4757 scope.go:117] "RemoveContainer" containerID="170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3"
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.356361 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7n9hl"]
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.360671 4757 scope.go:117] "RemoveContainer" containerID="b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6"
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.364246 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7n9hl"]
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.396299 4757 scope.go:117] "RemoveContainer" containerID="2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad"
Oct 06 15:17:01 crc kubenswrapper[4757]: E1006 15:17:01.396961 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad\": container with ID starting with 2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad not found: ID does not exist" containerID="2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad"
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.396999 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad"} err="failed to get container status \"2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad\": rpc error: code = NotFound desc = could not find container \"2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad\": container with ID starting with 2a3afed59e02adf0ce9e76c059ce74319cde6c8feff2fc4c1e3a50d37c42a4ad not found: ID does not exist"
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.397025 4757 scope.go:117] "RemoveContainer" containerID="170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3"
Oct 06 15:17:01 crc kubenswrapper[4757]: E1006 15:17:01.397403 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3\": container with ID starting with 170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3 not found: ID does not exist" containerID="170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3"
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.397434 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3"} err="failed to get container status \"170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3\": rpc error: code = NotFound desc = could not find container \"170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3\": container with ID starting with 170765fbb8a6f8c0c39a912127a0d96f16842c1b0229f1dcc55737eb50d727d3 not found: ID does not exist"
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.397450 4757 scope.go:117] "RemoveContainer" containerID="b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6"
Oct 06 15:17:01 crc kubenswrapper[4757]: E1006 15:17:01.397870 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6\": container with ID starting with b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6 not found: ID does not exist" containerID="b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6"
Oct 06 15:17:01 crc kubenswrapper[4757]: I1006 15:17:01.397899 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6"} err="failed to get container status \"b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6\": rpc error: code = NotFound desc = could not find container \"b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6\": container with ID starting with b3479f22dfd2a5e1740507e35e73af7e872b1980b9dfdee3880aa54f474e5cf6 not found: ID does not exist"
Oct 06 15:17:01 crc kubenswrapper[4757]: E1006 15:17:01.543743 4757 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.199:51766->38.102.83.199:42407: write tcp 38.102.83.199:51766->38.102.83.199:42407: write: broken pipe
Oct 06 15:17:02 crc kubenswrapper[4757]: I1006 15:17:02.190980 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7726637f-1e62-4443-a074-bc56f793171f" path="/var/lib/kubelet/pods/7726637f-1e62-4443-a074-bc56f793171f/volumes"
Oct 06 15:17:10 crc kubenswrapper[4757]: I1006 15:17:10.034650 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-b3d9-account-create-sfxqf"]
Oct 06 15:17:10 crc kubenswrapper[4757]: I1006 15:17:10.048249 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-b3d9-account-create-sfxqf"]
Oct 06 15:17:10 crc kubenswrapper[4757]: I1006 15:17:10.180376 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:17:10 crc kubenswrapper[4757]: E1006 15:17:10.181370 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3"
Oct 06 15:17:10 crc kubenswrapper[4757]: I1006 15:17:10.190061 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="698f058d-bce6-40a2-ad66-9cb2a8990e42" path="/var/lib/kubelet/pods/698f058d-bce6-40a2-ad66-9cb2a8990e42/volumes"
Oct 06 15:17:17 crc kubenswrapper[4757]: I1006 15:17:17.039505 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-qmc6b"]
Oct 06 15:17:17 crc kubenswrapper[4757]: I1006 15:17:17.045841 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-qmc6b"]
Oct 06 15:17:18 crc kubenswrapper[4757]: I1006 15:17:18.192367 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a6135bf-bb76-48a9-88a5-b6271ebc3bf9" path="/var/lib/kubelet/pods/8a6135bf-bb76-48a9-88a5-b6271ebc3bf9/volumes"
Oct 06 15:17:21 crc kubenswrapper[4757]: I1006 15:17:21.179705 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f"
Oct 06 15:17:21 crc kubenswrapper[4757]: E1006 15:17:21.180365 4757 pod_workers.go:1301] "Error
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:17:31 crc kubenswrapper[4757]: I1006 15:17:31.044273 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7pmh5"] Oct 06 15:17:31 crc kubenswrapper[4757]: I1006 15:17:31.052246 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7pmh5"] Oct 06 15:17:32 crc kubenswrapper[4757]: I1006 15:17:32.221930 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7155f33d-aad8-4689-990c-799c2c1ab159" path="/var/lib/kubelet/pods/7155f33d-aad8-4689-990c-799c2c1ab159/volumes" Oct 06 15:17:33 crc kubenswrapper[4757]: I1006 15:17:33.179998 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f" Oct 06 15:17:33 crc kubenswrapper[4757]: E1006 15:17:33.180453 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:17:38 crc kubenswrapper[4757]: I1006 15:17:38.677915 4757 scope.go:117] "RemoveContainer" containerID="0e985ccff7137a04b31b7e56062fd78965b722a1f2459cb5aa58ee1aa0d49be6" Oct 06 15:17:38 crc kubenswrapper[4757]: I1006 15:17:38.716065 4757 scope.go:117] "RemoveContainer" containerID="b04fd2f1c41fba26a5362c94e6ce7af9a8cc429a99d19aad799e2c6e7650f018" Oct 06 15:17:38 crc kubenswrapper[4757]: I1006 15:17:38.786160 4757 scope.go:117] "RemoveContainer" containerID="201963f22ea1299ade21109b13ea451618acaa4fcf5810a182449a48b1b1823e" Oct 06 15:17:38 crc kubenswrapper[4757]: I1006 15:17:38.836366 4757 scope.go:117] "RemoveContainer" containerID="843427f6731161d2a8add1b96ddc64a45e59c1e77d894b3250f6609671a89283" Oct 06 15:17:45 crc kubenswrapper[4757]: I1006 15:17:45.180322 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f" Oct 06 15:17:45 crc kubenswrapper[4757]: E1006 15:17:45.180919 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:17:57 crc kubenswrapper[4757]: I1006 15:17:57.179899 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f" Oct 06 15:17:57 crc kubenswrapper[4757]: E1006 15:17:57.180704 4757 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-7tb7h_openshift-machine-config-operator(0010c888-d5ad-4b2b-8309-1647fdf0dee3)\"" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" Oct 06 15:18:08 crc kubenswrapper[4757]: I1006 15:18:08.180523 4757 scope.go:117] "RemoveContainer" containerID="c9a01d3b8e651ed2a273ac73def6d0e86398bfbdd3b94e4d6d085ceec8dc0a3f" Oct 06 15:18:08 crc kubenswrapper[4757]: I1006 15:18:08.949427 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" event={"ID":"0010c888-d5ad-4b2b-8309-1647fdf0dee3","Type":"ContainerStarted","Data":"7cb55013cb2aea12bd88236eba1334888b60066fb1e3d42b1def202581ae4466"} Oct 06 15:18:15 crc kubenswrapper[4757]: I1006 15:18:15.009392 4757 generic.go:334] "Generic (PLEG): container finished" podID="bdf271a6-49e6-4532-90f2-ccf8971a0431" containerID="dbb9853c9306728a93f88448a5352b7cf00c08eb20575a4dcd489cf02379f664" exitCode=0 Oct 06 15:18:15 crc kubenswrapper[4757]: I1006 15:18:15.009443 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25nfh/must-gather-2vkcl" event={"ID":"bdf271a6-49e6-4532-90f2-ccf8971a0431","Type":"ContainerDied","Data":"dbb9853c9306728a93f88448a5352b7cf00c08eb20575a4dcd489cf02379f664"} Oct 06 15:18:15 crc kubenswrapper[4757]: I1006 15:18:15.010493 4757 scope.go:117] "RemoveContainer" containerID="dbb9853c9306728a93f88448a5352b7cf00c08eb20575a4dcd489cf02379f664" Oct 06 15:18:15 crc kubenswrapper[4757]: I1006 15:18:15.658160 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25nfh_must-gather-2vkcl_bdf271a6-49e6-4532-90f2-ccf8971a0431/gather/0.log" Oct 06 15:18:22 crc kubenswrapper[4757]: I1006 15:18:22.787816 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25nfh/must-gather-2vkcl"] Oct 06 15:18:22 crc kubenswrapper[4757]: I1006 15:18:22.789190 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-25nfh/must-gather-2vkcl" podUID="bdf271a6-49e6-4532-90f2-ccf8971a0431" containerName="copy" containerID="cri-o://98f2d3b4ac279bf321c96ddeafb44be6de65238f5fe2cdc773fd523fadbb24d7" gracePeriod=2 Oct 06 15:18:22 crc kubenswrapper[4757]: I1006 15:18:22.801446 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25nfh/must-gather-2vkcl"] Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.081766 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25nfh_must-gather-2vkcl_bdf271a6-49e6-4532-90f2-ccf8971a0431/copy/0.log" Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.082476 4757 generic.go:334] "Generic (PLEG): container finished" podID="bdf271a6-49e6-4532-90f2-ccf8971a0431" containerID="98f2d3b4ac279bf321c96ddeafb44be6de65238f5fe2cdc773fd523fadbb24d7" exitCode=143 Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.315831 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25nfh_must-gather-2vkcl_bdf271a6-49e6-4532-90f2-ccf8971a0431/copy/0.log" Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.316914 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25nfh/must-gather-2vkcl" Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.376995 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7hfw\" (UniqueName: \"kubernetes.io/projected/bdf271a6-49e6-4532-90f2-ccf8971a0431-kube-api-access-b7hfw\") pod \"bdf271a6-49e6-4532-90f2-ccf8971a0431\" (UID: \"bdf271a6-49e6-4532-90f2-ccf8971a0431\") " Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.377236 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bdf271a6-49e6-4532-90f2-ccf8971a0431-must-gather-output\") pod \"bdf271a6-49e6-4532-90f2-ccf8971a0431\" (UID: \"bdf271a6-49e6-4532-90f2-ccf8971a0431\") " Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.382313 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdf271a6-49e6-4532-90f2-ccf8971a0431-kube-api-access-b7hfw" (OuterVolumeSpecName: "kube-api-access-b7hfw") pod "bdf271a6-49e6-4532-90f2-ccf8971a0431" (UID: "bdf271a6-49e6-4532-90f2-ccf8971a0431"). InnerVolumeSpecName "kube-api-access-b7hfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.481278 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7hfw\" (UniqueName: \"kubernetes.io/projected/bdf271a6-49e6-4532-90f2-ccf8971a0431-kube-api-access-b7hfw\") on node \"crc\" DevicePath \"\"" Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.503623 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bdf271a6-49e6-4532-90f2-ccf8971a0431-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "bdf271a6-49e6-4532-90f2-ccf8971a0431" (UID: "bdf271a6-49e6-4532-90f2-ccf8971a0431"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:18:23 crc kubenswrapper[4757]: I1006 15:18:23.582966 4757 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bdf271a6-49e6-4532-90f2-ccf8971a0431-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 06 15:18:24 crc kubenswrapper[4757]: I1006 15:18:24.113080 4757 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25nfh_must-gather-2vkcl_bdf271a6-49e6-4532-90f2-ccf8971a0431/copy/0.log" Oct 06 15:18:24 crc kubenswrapper[4757]: I1006 15:18:24.114683 4757 scope.go:117] "RemoveContainer" containerID="98f2d3b4ac279bf321c96ddeafb44be6de65238f5fe2cdc773fd523fadbb24d7" Oct 06 15:18:24 crc kubenswrapper[4757]: I1006 15:18:24.114739 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25nfh/must-gather-2vkcl" Oct 06 15:18:24 crc kubenswrapper[4757]: I1006 15:18:24.151344 4757 scope.go:117] "RemoveContainer" containerID="dbb9853c9306728a93f88448a5352b7cf00c08eb20575a4dcd489cf02379f664" Oct 06 15:18:24 crc kubenswrapper[4757]: I1006 15:18:24.192051 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdf271a6-49e6-4532-90f2-ccf8971a0431" path="/var/lib/kubelet/pods/bdf271a6-49e6-4532-90f2-ccf8971a0431/volumes" Oct 06 15:19:38 crc kubenswrapper[4757]: I1006 15:19:38.986852 4757 scope.go:117] "RemoveContainer" containerID="f27b6e7ee1ae9de3c0de1681575597e783fe8b25fb0e506e3089c0f4c2f33e8c" Oct 06 15:19:45 crc kubenswrapper[4757]: I1006 15:19:45.047220 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-xc9j5"] Oct 06 15:19:45 crc kubenswrapper[4757]: I1006 15:19:45.053709 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-xc9j5"] Oct 06 15:19:46 crc kubenswrapper[4757]: I1006 15:19:46.194782 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8aceb73c-695c-45ef-a519-3648dc1defda" path="/var/lib/kubelet/pods/8aceb73c-695c-45ef-a519-3648dc1defda/volumes" Oct 06 15:19:54 crc kubenswrapper[4757]: I1006 15:19:54.092440 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-5168-account-create-f2vck"] Oct 06 15:19:54 crc kubenswrapper[4757]: I1006 15:19:54.098568 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-5168-account-create-f2vck"] Oct 06 15:19:54 crc kubenswrapper[4757]: I1006 15:19:54.193881 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3eafe25b-cbf4-46ea-9b52-4b8244574443" path="/var/lib/kubelet/pods/3eafe25b-cbf4-46ea-9b52-4b8244574443/volumes" Oct 06 15:20:02 crc kubenswrapper[4757]: I1006 15:20:02.044833 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-tl5mh"] Oct 06 15:20:02 crc kubenswrapper[4757]: I1006 15:20:02.072026 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-tl5mh"] Oct 06 15:20:02 crc kubenswrapper[4757]: I1006 15:20:02.195140 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d640aee-a2ac-4541-9a43-6db175d7ac71" path="/var/lib/kubelet/pods/1d640aee-a2ac-4541-9a43-6db175d7ac71/volumes" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.093470 4757 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ssmdb"] Oct 06 15:20:25 crc kubenswrapper[4757]: E1006 15:20:25.094296 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdf271a6-49e6-4532-90f2-ccf8971a0431" containerName="gather" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.094309 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdf271a6-49e6-4532-90f2-ccf8971a0431" containerName="gather" Oct 06 15:20:25 crc kubenswrapper[4757]: E1006 15:20:25.094330 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdf271a6-49e6-4532-90f2-ccf8971a0431" containerName="copy" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.094336 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdf271a6-49e6-4532-90f2-ccf8971a0431" containerName="copy" Oct 06 15:20:25 crc kubenswrapper[4757]: E1006 15:20:25.094349 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7726637f-1e62-4443-a074-bc56f793171f" containerName="registry-server" Oct 06 
15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.094356 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7726637f-1e62-4443-a074-bc56f793171f" containerName="registry-server" Oct 06 15:20:25 crc kubenswrapper[4757]: E1006 15:20:25.094364 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7726637f-1e62-4443-a074-bc56f793171f" containerName="extract-utilities" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.094370 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7726637f-1e62-4443-a074-bc56f793171f" containerName="extract-utilities" Oct 06 15:20:25 crc kubenswrapper[4757]: E1006 15:20:25.094386 4757 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7726637f-1e62-4443-a074-bc56f793171f" containerName="extract-content" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.094392 4757 state_mem.go:107] "Deleted CPUSet assignment" podUID="7726637f-1e62-4443-a074-bc56f793171f" containerName="extract-content" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.094567 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdf271a6-49e6-4532-90f2-ccf8971a0431" containerName="gather" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.094578 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="7726637f-1e62-4443-a074-bc56f793171f" containerName="registry-server" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.094593 4757 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdf271a6-49e6-4532-90f2-ccf8971a0431" containerName="copy" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.095905 4757 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.127198 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ssmdb"] Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.160248 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-catalog-content\") pod \"certified-operators-ssmdb\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.160303 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcwfz\" (UniqueName: \"kubernetes.io/projected/9cdcee62-ddf0-4dae-b634-3ba144ab2032-kube-api-access-kcwfz\") pod \"certified-operators-ssmdb\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.160419 4757 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-utilities\") pod \"certified-operators-ssmdb\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.261445 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-utilities\") pod \"certified-operators-ssmdb\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " 
pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.261563 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-catalog-content\") pod \"certified-operators-ssmdb\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.261592 4757 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcwfz\" (UniqueName: \"kubernetes.io/projected/9cdcee62-ddf0-4dae-b634-3ba144ab2032-kube-api-access-kcwfz\") pod \"certified-operators-ssmdb\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.261901 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-utilities\") pod \"certified-operators-ssmdb\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.262234 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-catalog-content\") pod \"certified-operators-ssmdb\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.284330 4757 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcwfz\" (UniqueName: \"kubernetes.io/projected/9cdcee62-ddf0-4dae-b634-3ba144ab2032-kube-api-access-kcwfz\") pod \"certified-operators-ssmdb\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.424057 4757 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:25 crc kubenswrapper[4757]: I1006 15:20:25.958183 4757 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ssmdb"] Oct 06 15:20:26 crc kubenswrapper[4757]: I1006 15:20:26.293056 4757 generic.go:334] "Generic (PLEG): container finished" podID="9cdcee62-ddf0-4dae-b634-3ba144ab2032" containerID="711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a" exitCode=0 Oct 06 15:20:26 crc kubenswrapper[4757]: I1006 15:20:26.293132 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ssmdb" event={"ID":"9cdcee62-ddf0-4dae-b634-3ba144ab2032","Type":"ContainerDied","Data":"711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a"} Oct 06 15:20:26 crc kubenswrapper[4757]: I1006 15:20:26.293352 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ssmdb" event={"ID":"9cdcee62-ddf0-4dae-b634-3ba144ab2032","Type":"ContainerStarted","Data":"26b54dd0cf166b05e18ced8c261cbe54d429f323c820d0160718a6b23168aa66"} Oct 06 15:20:26 crc kubenswrapper[4757]: I1006 15:20:26.294615 4757 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 06 15:20:28 crc kubenswrapper[4757]: I1006 15:20:28.320268 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ssmdb" event={"ID":"9cdcee62-ddf0-4dae-b634-3ba144ab2032","Type":"ContainerDied","Data":"387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1"} Oct 06 15:20:28 crc kubenswrapper[4757]: I1006 15:20:28.320070 4757 generic.go:334] "Generic (PLEG): container finished" podID="9cdcee62-ddf0-4dae-b634-3ba144ab2032" containerID="387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1" exitCode=0 Oct 06 15:20:29 crc kubenswrapper[4757]: I1006 15:20:29.334027 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ssmdb" event={"ID":"9cdcee62-ddf0-4dae-b634-3ba144ab2032","Type":"ContainerStarted","Data":"552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e"} Oct 06 15:20:29 crc kubenswrapper[4757]: I1006 15:20:29.357596 4757 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ssmdb" podStartSLOduration=1.7919393559999999 podStartE2EDuration="4.357571351s" podCreationTimestamp="2025-10-06 15:20:25 +0000 UTC" firstStartedPulling="2025-10-06 15:20:26.29443153 +0000 UTC m=+6114.791750067" lastFinishedPulling="2025-10-06 15:20:28.860063505 +0000 UTC m=+6117.357382062" observedRunningTime="2025-10-06 15:20:29.353660136 +0000 UTC m=+6117.850978713" watchObservedRunningTime="2025-10-06 15:20:29.357571351 +0000 UTC m=+6117.854889928" Oct 06 15:20:34 crc kubenswrapper[4757]: I1006 15:20:34.360968 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 15:20:34 crc kubenswrapper[4757]: I1006 15:20:34.361634 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" Oct 06 15:20:35 crc kubenswrapper[4757]: I1006 15:20:35.425387 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:35 crc kubenswrapper[4757]: I1006 15:20:35.425713 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:35 crc kubenswrapper[4757]: I1006 15:20:35.494975 4757 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:36 crc kubenswrapper[4757]: I1006 15:20:36.466945 4757 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:36 crc kubenswrapper[4757]: I1006 15:20:36.530271 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ssmdb"] Oct 06 15:20:38 crc kubenswrapper[4757]: I1006 15:20:38.421465 4757 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ssmdb" podUID="9cdcee62-ddf0-4dae-b634-3ba144ab2032" containerName="registry-server" containerID="cri-o://552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e" gracePeriod=2 Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.064089 4757 scope.go:117] "RemoveContainer" containerID="d5b021e6d04126c00c4162d73e7573d2199f9679817b63f48013fb7181f5e4ba" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.082857 4757 scope.go:117] "RemoveContainer" containerID="6d022086a00e786b030c46c45b829918864c63bc7f168fba029e1f93129a7550" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.204553 4757 scope.go:117] "RemoveContainer" containerID="49204f1162ba02c3146fb27401755b6f19b059976c5610b970449461bec2c4c9" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.415289 4757 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.446797 4757 generic.go:334] "Generic (PLEG): container finished" podID="9cdcee62-ddf0-4dae-b634-3ba144ab2032" containerID="552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e" exitCode=0 Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.446875 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ssmdb" event={"ID":"9cdcee62-ddf0-4dae-b634-3ba144ab2032","Type":"ContainerDied","Data":"552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e"} Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.446898 4757 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ssmdb" event={"ID":"9cdcee62-ddf0-4dae-b634-3ba144ab2032","Type":"ContainerDied","Data":"26b54dd0cf166b05e18ced8c261cbe54d429f323c820d0160718a6b23168aa66"} Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.446920 4757 scope.go:117] "RemoveContainer" containerID="552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.447033 4757 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ssmdb" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.467161 4757 scope.go:117] "RemoveContainer" containerID="387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.496285 4757 scope.go:117] "RemoveContainer" containerID="711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.516948 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-catalog-content\") pod \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.517186 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-utilities\") pod \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.517214 4757 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcwfz\" (UniqueName: \"kubernetes.io/projected/9cdcee62-ddf0-4dae-b634-3ba144ab2032-kube-api-access-kcwfz\") pod \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\" (UID: \"9cdcee62-ddf0-4dae-b634-3ba144ab2032\") " Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.519086 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-utilities" (OuterVolumeSpecName: "utilities") pod "9cdcee62-ddf0-4dae-b634-3ba144ab2032" (UID: "9cdcee62-ddf0-4dae-b634-3ba144ab2032"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.523436 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cdcee62-ddf0-4dae-b634-3ba144ab2032-kube-api-access-kcwfz" (OuterVolumeSpecName: "kube-api-access-kcwfz") pod "9cdcee62-ddf0-4dae-b634-3ba144ab2032" (UID: "9cdcee62-ddf0-4dae-b634-3ba144ab2032"). InnerVolumeSpecName "kube-api-access-kcwfz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.532902 4757 scope.go:117] "RemoveContainer" containerID="552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e" Oct 06 15:20:39 crc kubenswrapper[4757]: E1006 15:20:39.533479 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e\": container with ID starting with 552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e not found: ID does not exist" containerID="552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.533515 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e"} err="failed to get container status \"552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e\": rpc error: code = NotFound desc = could not find container \"552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e\": container with ID starting with 552c8b3a4cda4273ddd258e1cea0b3d18b36682f593f34887ee0313c1a869c1e not found: ID does not exist" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.533542 4757 scope.go:117] "RemoveContainer" containerID="387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1" Oct 06 15:20:39 crc kubenswrapper[4757]: E1006 15:20:39.533890 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1\": container with ID starting with 387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1 not found: ID does not exist" containerID="387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.533921 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1"} err="failed to get container status \"387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1\": rpc error: code = NotFound desc = could not find container \"387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1\": container with ID starting with 387fe8c73c8346e54fd4c7f6bd36917470e5da1166a4ab041a629995ee1b32a1 not found: ID does not exist" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.533940 4757 scope.go:117] "RemoveContainer" containerID="711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a" Oct 06 15:20:39 crc kubenswrapper[4757]: E1006 15:20:39.534196 4757 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a\": container with ID starting with 711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a not found: ID does not exist" containerID="711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.534225 4757 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a"} err="failed to get container status \"711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a\": rpc error: code = NotFound desc = could not 
find container \"711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a\": container with ID starting with 711ef133047f6a62112311de8d78e61ea136e4c12821f422d10143c7a586287a not found: ID does not exist" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.564303 4757 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cdcee62-ddf0-4dae-b634-3ba144ab2032" (UID: "9cdcee62-ddf0-4dae-b634-3ba144ab2032"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.619655 4757 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-utilities\") on node \"crc\" DevicePath \"\"" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.619704 4757 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcwfz\" (UniqueName: \"kubernetes.io/projected/9cdcee62-ddf0-4dae-b634-3ba144ab2032-kube-api-access-kcwfz\") on node \"crc\" DevicePath \"\"" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.619722 4757 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cdcee62-ddf0-4dae-b634-3ba144ab2032-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.787171 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ssmdb"] Oct 06 15:20:39 crc kubenswrapper[4757]: I1006 15:20:39.794199 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ssmdb"] Oct 06 15:20:40 crc kubenswrapper[4757]: I1006 15:20:40.198411 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cdcee62-ddf0-4dae-b634-3ba144ab2032" path="/var/lib/kubelet/pods/9cdcee62-ddf0-4dae-b634-3ba144ab2032/volumes" Oct 06 15:20:43 crc kubenswrapper[4757]: I1006 15:20:43.044968 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-rvnm9"] Oct 06 15:20:43 crc kubenswrapper[4757]: I1006 15:20:43.055856 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-rvnm9"] Oct 06 15:20:44 crc kubenswrapper[4757]: I1006 15:20:44.202836 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fa8484b-cc8e-4b19-a476-d3cec87675a3" path="/var/lib/kubelet/pods/7fa8484b-cc8e-4b19-a476-d3cec87675a3/volumes" Oct 06 15:20:52 crc kubenswrapper[4757]: I1006 15:20:52.052499 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-528f-account-create-xkcdt"] Oct 06 15:20:52 crc kubenswrapper[4757]: I1006 15:20:52.059788 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-528f-account-create-xkcdt"] Oct 06 15:20:52 crc kubenswrapper[4757]: I1006 15:20:52.194032 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95694796-073a-422c-83d8-97a9a5b7e000" path="/var/lib/kubelet/pods/95694796-073a-422c-83d8-97a9a5b7e000/volumes" Oct 06 15:21:01 crc kubenswrapper[4757]: I1006 15:21:01.027085 4757 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-n59jv"] Oct 06 15:21:01 crc kubenswrapper[4757]: I1006 15:21:01.039616 4757 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-n59jv"] Oct 06 15:21:02 crc 
kubenswrapper[4757]: I1006 15:21:02.196016 4757 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9b02124-fbb0-4055-9eb0-7e7f39db93e3" path="/var/lib/kubelet/pods/e9b02124-fbb0-4055-9eb0-7e7f39db93e3/volumes" Oct 06 15:21:04 crc kubenswrapper[4757]: I1006 15:21:04.361536 4757 patch_prober.go:28] interesting pod/machine-config-daemon-7tb7h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 06 15:21:04 crc kubenswrapper[4757]: I1006 15:21:04.361823 4757 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7tb7h" podUID="0010c888-d5ad-4b2b-8309-1647fdf0dee3" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"